Dec 03 08:39:51 crc systemd[1]: Starting Kubernetes Kubelet... Dec 03 08:39:51 crc restorecon[4575]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c24 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c138,c778 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 08:39:51 crc restorecon[4575]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 
Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 08:39:51 crc restorecon[4575]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 08:39:51 crc restorecon[4575]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c108,c511 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: 
/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c12,c18 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 08:39:51 crc restorecon[4575]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 08:39:51 crc restorecon[4575]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c16 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 03 08:39:51 crc restorecon[4575]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 08:39:51 crc restorecon[4575]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:51 crc restorecon[4575]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 08:39:51 crc 
restorecon[4575]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 08:39:51 crc restorecon[4575]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 08:39:51 crc restorecon[4575]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 08:39:51 crc 
restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc 
restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc 
restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 08:39:51 
crc restorecon[4575]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 
08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 08:39:51 crc restorecon[4575]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 08:39:51 crc restorecon[4575]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 
08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 08:39:52 crc 
restorecon[4575]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 
08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 
08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc 
restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 08:39:52 crc restorecon[4575]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 08:39:52 crc restorecon[4575]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 08:39:52 crc restorecon[4575]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Dec 03 08:39:53 crc kubenswrapper[4576]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 03 08:39:53 crc kubenswrapper[4576]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Dec 03 08:39:53 crc kubenswrapper[4576]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 03 08:39:53 crc kubenswrapper[4576]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
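The deprecation warnings that follow the relabeling pass all point to the same fix: carry these settings in the KubeletConfiguration file passed via --config instead of on the kubelet command line. Below is a minimal sketch of the equivalent fields, assuming the k8s.io/kubelet/config/v1beta1, k8s.io/api and sigs.k8s.io/yaml Go modules; the endpoint, plugin directory, taint, and eviction values are illustrative placeholders, not this node's actual settings.

// Sketch only: renders a KubeletConfiguration covering the flags deprecated above.
// All concrete values are illustrative placeholders.
package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
	kubeletconfigv1beta1 "k8s.io/kubelet/config/v1beta1"
	"sigs.k8s.io/yaml"
)

func main() {
	cfg := kubeletconfigv1beta1.KubeletConfiguration{}
	cfg.APIVersion = "kubelet.config.k8s.io/v1beta1"
	cfg.Kind = "KubeletConfiguration"

	// Replaces --container-runtime-endpoint (placeholder CRI-O socket path).
	cfg.ContainerRuntimeEndpoint = "unix:///var/run/crio/crio.sock"
	// Replaces --volume-plugin-dir.
	cfg.VolumePluginDir = "/etc/kubernetes/kubelet-plugins/volume/exec"
	// Replaces --register-with-taints.
	cfg.RegisterWithTaints = []corev1.Taint{
		{Key: "node-role.kubernetes.io/master", Effect: corev1.TaintEffectNoSchedule},
	}
	// --minimum-container-ttl-duration is superseded by the eviction settings.
	cfg.EvictionHard = map[string]string{"memory.available": "100Mi"}

	// Print the YAML document that would be handed to the kubelet via --config.
	out, err := yaml.Marshal(&cfg)
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out))
}

Note that --pod-infra-container-image (warned about just below) has no KubeletConfiguration equivalent; per the log message itself, the image garbage collector takes the sandbox image from the CRI runtime.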
Dec 03 08:39:53 crc kubenswrapper[4576]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Dec 03 08:39:53 crc kubenswrapper[4576]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.163666 4576 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.170853 4576 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.170883 4576 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.170889 4576 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.170895 4576 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.170900 4576 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.170905 4576 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.170912 4576 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.170917 4576 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.170921 4576 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.170926 4576 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.170931 4576 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.170937 4576 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.170942 4576 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.170946 4576 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.170953 4576 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.170960 4576 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.170966 4576 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.170971 4576 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.170976 4576 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.170981 4576 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.170986 4576 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.170991 4576 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.170996 4576 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171001 4576 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171006 4576 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171011 4576 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171015 4576 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171025 4576 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171030 4576 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171035 4576 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171042 4576 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171049 4576 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171056 4576 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171062 4576 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171069 4576 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171074 4576 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171080 4576 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171085 4576 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171099 4576 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171104 4576 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171114 4576 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171119 4576 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171129 4576 feature_gate.go:330] unrecognized feature gate: Example
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171139 4576 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171144 4576 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171149 4576 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171153 4576 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171159 4576 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171164 4576 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171168 4576 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171173 4576 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171181 4576 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171186 4576 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171191 4576 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171198 4576 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171203 4576 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171208 4576 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171213 4576 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171219 4576 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171224 4576 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171229 4576 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171234 4576 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171239 4576 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171244 4576 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171249 4576 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171254 4576 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171259 4576 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171264 4576 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171269 4576 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171275 4576 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.171280 4576 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171427 4576 flags.go:64] FLAG: --address="0.0.0.0" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171439 4576 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171448 4576 flags.go:64] FLAG: --anonymous-auth="true" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171455 4576 flags.go:64] FLAG: --application-metrics-count-limit="100" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171462 4576 flags.go:64] FLAG: --authentication-token-webhook="false" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171468 4576 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171479 4576 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171486 4576 flags.go:64] 
FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171492 4576 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171498 4576 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171504 4576 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171511 4576 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171517 4576 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171548 4576 flags.go:64] FLAG: --cgroup-root="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171554 4576 flags.go:64] FLAG: --cgroups-per-qos="true" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171560 4576 flags.go:64] FLAG: --client-ca-file="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171565 4576 flags.go:64] FLAG: --cloud-config="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171571 4576 flags.go:64] FLAG: --cloud-provider="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171576 4576 flags.go:64] FLAG: --cluster-dns="[]" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171584 4576 flags.go:64] FLAG: --cluster-domain="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171590 4576 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171596 4576 flags.go:64] FLAG: --config-dir="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171601 4576 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171607 4576 flags.go:64] FLAG: --container-log-max-files="5" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171614 4576 flags.go:64] FLAG: --container-log-max-size="10Mi" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171620 4576 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171626 4576 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171632 4576 flags.go:64] FLAG: --containerd-namespace="k8s.io" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171637 4576 flags.go:64] FLAG: --contention-profiling="false" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171644 4576 flags.go:64] FLAG: --cpu-cfs-quota="true" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171650 4576 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171656 4576 flags.go:64] FLAG: --cpu-manager-policy="none" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171662 4576 flags.go:64] FLAG: --cpu-manager-policy-options="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171668 4576 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171674 4576 flags.go:64] FLAG: --enable-controller-attach-detach="true" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171680 4576 flags.go:64] FLAG: --enable-debugging-handlers="true" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171686 4576 flags.go:64] FLAG: --enable-load-reader="false" Dec 03 08:39:53 crc 
kubenswrapper[4576]: I1203 08:39:53.171691 4576 flags.go:64] FLAG: --enable-server="true" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171698 4576 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171705 4576 flags.go:64] FLAG: --event-burst="100" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171711 4576 flags.go:64] FLAG: --event-qps="50" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171716 4576 flags.go:64] FLAG: --event-storage-age-limit="default=0" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171722 4576 flags.go:64] FLAG: --event-storage-event-limit="default=0" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171728 4576 flags.go:64] FLAG: --eviction-hard="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171742 4576 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171748 4576 flags.go:64] FLAG: --eviction-minimum-reclaim="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171754 4576 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171761 4576 flags.go:64] FLAG: --eviction-soft="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171766 4576 flags.go:64] FLAG: --eviction-soft-grace-period="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171772 4576 flags.go:64] FLAG: --exit-on-lock-contention="false" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171778 4576 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171784 4576 flags.go:64] FLAG: --experimental-mounter-path="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171789 4576 flags.go:64] FLAG: --fail-cgroupv1="false" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171794 4576 flags.go:64] FLAG: --fail-swap-on="true" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171800 4576 flags.go:64] FLAG: --feature-gates="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171807 4576 flags.go:64] FLAG: --file-check-frequency="20s" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171813 4576 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171819 4576 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171824 4576 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171830 4576 flags.go:64] FLAG: --healthz-port="10248" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171836 4576 flags.go:64] FLAG: --help="false" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171842 4576 flags.go:64] FLAG: --hostname-override="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171847 4576 flags.go:64] FLAG: --housekeeping-interval="10s" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171854 4576 flags.go:64] FLAG: --http-check-frequency="20s" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171859 4576 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171865 4576 flags.go:64] FLAG: --image-credential-provider-config="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171871 4576 flags.go:64] FLAG: --image-gc-high-threshold="85" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171876 
4576 flags.go:64] FLAG: --image-gc-low-threshold="80" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171882 4576 flags.go:64] FLAG: --image-service-endpoint="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171887 4576 flags.go:64] FLAG: --kernel-memcg-notification="false" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171893 4576 flags.go:64] FLAG: --kube-api-burst="100" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171899 4576 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171905 4576 flags.go:64] FLAG: --kube-api-qps="50" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171910 4576 flags.go:64] FLAG: --kube-reserved="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171916 4576 flags.go:64] FLAG: --kube-reserved-cgroup="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171921 4576 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171927 4576 flags.go:64] FLAG: --kubelet-cgroups="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171933 4576 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171938 4576 flags.go:64] FLAG: --lock-file="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171944 4576 flags.go:64] FLAG: --log-cadvisor-usage="false" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171950 4576 flags.go:64] FLAG: --log-flush-frequency="5s" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171956 4576 flags.go:64] FLAG: --log-json-info-buffer-size="0" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171964 4576 flags.go:64] FLAG: --log-json-split-stream="false" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171970 4576 flags.go:64] FLAG: --log-text-info-buffer-size="0" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171976 4576 flags.go:64] FLAG: --log-text-split-stream="false" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171981 4576 flags.go:64] FLAG: --logging-format="text" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171987 4576 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171993 4576 flags.go:64] FLAG: --make-iptables-util-chains="true" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.171999 4576 flags.go:64] FLAG: --manifest-url="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172004 4576 flags.go:64] FLAG: --manifest-url-header="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172011 4576 flags.go:64] FLAG: --max-housekeeping-interval="15s" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172017 4576 flags.go:64] FLAG: --max-open-files="1000000" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172024 4576 flags.go:64] FLAG: --max-pods="110" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172030 4576 flags.go:64] FLAG: --maximum-dead-containers="-1" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172036 4576 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172042 4576 flags.go:64] FLAG: --memory-manager-policy="None" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172048 4576 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172054 4576 
flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172060 4576 flags.go:64] FLAG: --node-ip="192.168.126.11" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172066 4576 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172079 4576 flags.go:64] FLAG: --node-status-max-images="50" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172084 4576 flags.go:64] FLAG: --node-status-update-frequency="10s" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172091 4576 flags.go:64] FLAG: --oom-score-adj="-999" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172097 4576 flags.go:64] FLAG: --pod-cidr="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172102 4576 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172110 4576 flags.go:64] FLAG: --pod-manifest-path="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172116 4576 flags.go:64] FLAG: --pod-max-pids="-1" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172122 4576 flags.go:64] FLAG: --pods-per-core="0" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172128 4576 flags.go:64] FLAG: --port="10250" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172134 4576 flags.go:64] FLAG: --protect-kernel-defaults="false" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172139 4576 flags.go:64] FLAG: --provider-id="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172145 4576 flags.go:64] FLAG: --qos-reserved="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172159 4576 flags.go:64] FLAG: --read-only-port="10255" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172165 4576 flags.go:64] FLAG: --register-node="true" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172171 4576 flags.go:64] FLAG: --register-schedulable="true" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172176 4576 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172198 4576 flags.go:64] FLAG: --registry-burst="10" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172204 4576 flags.go:64] FLAG: --registry-qps="5" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172210 4576 flags.go:64] FLAG: --reserved-cpus="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172216 4576 flags.go:64] FLAG: --reserved-memory="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172224 4576 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172230 4576 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172235 4576 flags.go:64] FLAG: --rotate-certificates="false" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172241 4576 flags.go:64] FLAG: --rotate-server-certificates="false" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172247 4576 flags.go:64] FLAG: --runonce="false" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172252 4576 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172258 4576 flags.go:64] FLAG: 
--runtime-request-timeout="2m0s" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172264 4576 flags.go:64] FLAG: --seccomp-default="false" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172270 4576 flags.go:64] FLAG: --serialize-image-pulls="true" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172276 4576 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172282 4576 flags.go:64] FLAG: --storage-driver-db="cadvisor" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172287 4576 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172293 4576 flags.go:64] FLAG: --storage-driver-password="root" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172299 4576 flags.go:64] FLAG: --storage-driver-secure="false" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172304 4576 flags.go:64] FLAG: --storage-driver-table="stats" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172310 4576 flags.go:64] FLAG: --storage-driver-user="root" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172316 4576 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172322 4576 flags.go:64] FLAG: --sync-frequency="1m0s" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172328 4576 flags.go:64] FLAG: --system-cgroups="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172333 4576 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172342 4576 flags.go:64] FLAG: --system-reserved-cgroup="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172347 4576 flags.go:64] FLAG: --tls-cert-file="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172353 4576 flags.go:64] FLAG: --tls-cipher-suites="[]" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172360 4576 flags.go:64] FLAG: --tls-min-version="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172366 4576 flags.go:64] FLAG: --tls-private-key-file="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172372 4576 flags.go:64] FLAG: --topology-manager-policy="none" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172377 4576 flags.go:64] FLAG: --topology-manager-policy-options="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172383 4576 flags.go:64] FLAG: --topology-manager-scope="container" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172389 4576 flags.go:64] FLAG: --v="2" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172397 4576 flags.go:64] FLAG: --version="false" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172405 4576 flags.go:64] FLAG: --vmodule="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172411 4576 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.172418 4576 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172593 4576 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172602 4576 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172609 4576 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 03 08:39:53 crc 
kubenswrapper[4576]: W1203 08:39:53.172614 4576 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172620 4576 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172627 4576 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172633 4576 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172639 4576 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172644 4576 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172649 4576 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172654 4576 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172659 4576 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172664 4576 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172671 4576 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172678 4576 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172684 4576 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172689 4576 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172695 4576 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172706 4576 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172711 4576 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172716 4576 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172721 4576 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172726 4576 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172731 4576 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172736 4576 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172741 4576 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172746 4576 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172751 4576 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 03 
08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172756 4576 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172761 4576 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172766 4576 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172771 4576 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172779 4576 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172786 4576 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172791 4576 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172798 4576 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172804 4576 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172809 4576 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172815 4576 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172820 4576 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172825 4576 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172830 4576 feature_gate.go:330] unrecognized feature gate: Example Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172835 4576 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172841 4576 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172848 4576 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172853 4576 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172858 4576 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172863 4576 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172869 4576 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172873 4576 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172880 4576 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172885 4576 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172890 4576 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172895 4576 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172900 4576 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172905 4576 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172910 4576 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172914 4576 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172920 4576 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172924 4576 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172929 4576 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172934 4576 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172939 4576 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172944 4576 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172949 4576 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172954 4576 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172959 4576 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172963 4576 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172969 4576 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172974 4576 feature_gate.go:330] unrecognized feature gate: 
VSphereControlPlaneMachineSet Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.172987 4576 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.173004 4576 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.180577 4576 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.180602 4576 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180689 4576 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180696 4576 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180702 4576 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180707 4576 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180713 4576 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180718 4576 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180723 4576 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180728 4576 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180733 4576 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180738 4576 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180743 4576 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180748 4576 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180753 4576 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180758 4576 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180763 4576 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180768 4576 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180773 4576 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180778 4576 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180783 4576 
feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180789 4576 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180796 4576 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180801 4576 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180806 4576 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180811 4576 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180816 4576 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180821 4576 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180826 4576 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180839 4576 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180844 4576 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180850 4576 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180855 4576 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180865 4576 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180870 4576 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180875 4576 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180880 4576 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180885 4576 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180890 4576 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180897 4576 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180903 4576 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180908 4576 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180914 4576 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180920 4576 feature_gate.go:330] unrecognized feature gate: Example Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180925 4576 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180930 4576 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180935 4576 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180940 4576 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180945 4576 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180950 4576 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180955 4576 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180960 4576 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180965 4576 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180970 4576 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180975 4576 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180980 4576 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180986 4576 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180992 4576 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.180999 4576 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181005 4576 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181011 4576 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181017 4576 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181022 4576 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181028 4576 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181033 4576 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181039 4576 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181044 4576 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181049 4576 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181054 4576 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181088 4576 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181093 4576 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181098 4576 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181103 4576 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.181117 4576 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181254 4576 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181280 4576 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181286 4576 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181291 4576 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181297 4576 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181302 4576 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181306 4576 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181311 4576 feature_gate.go:330] unrecognized feature gate: 
InsightsConfig Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181316 4576 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181321 4576 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181326 4576 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181331 4576 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181336 4576 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181341 4576 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181346 4576 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181351 4576 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181356 4576 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181362 4576 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181367 4576 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181372 4576 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181379 4576 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181385 4576 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181391 4576 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181396 4576 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181401 4576 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181406 4576 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181411 4576 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181417 4576 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181423 4576 feature_gate.go:330] unrecognized feature gate: Example Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181430 4576 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181435 4576 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181440 4576 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181445 4576 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181450 4576 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181455 4576 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181460 4576 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181465 4576 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181470 4576 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181475 4576 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181479 4576 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181484 4576 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181489 4576 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181494 4576 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181499 4576 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181504 4576 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181508 4576 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181513 4576 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181518 4576 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181544 4576 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181552 4576 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181558 4576 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181564 4576 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181570 4576 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181575 4576 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 03 08:39:53 crc 
kubenswrapper[4576]: W1203 08:39:53.181579 4576 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181584 4576 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181589 4576 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181594 4576 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181599 4576 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181604 4576 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181609 4576 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181614 4576 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181619 4576 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181625 4576 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181631 4576 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181639 4576 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181645 4576 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181651 4576 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181657 4576 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181662 4576 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.181668 4576 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.181675 4576 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.181856 4576 server.go:940] "Client rotation is on, will bootstrap in background" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.186991 4576 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.187222 4576 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.188316 4576 server.go:997] "Starting client certificate rotation" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.188379 4576 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.189328 4576 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-07 02:31:36.389701976 +0000 UTC Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.189592 4576 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.434967 4576 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 03 08:39:53 crc kubenswrapper[4576]: E1203 08:39:53.438016 4576 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.129.56.136:6443: connect: connection refused" logger="UnhandledError" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.438588 4576 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.459451 4576 log.go:25] "Validated CRI v1 runtime API" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.484138 4576 log.go:25] "Validated CRI v1 image API" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.486779 4576 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.490487 4576 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-03-08-34-34-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.490585 4576 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.510361 4576 manager.go:217] Machine: {Timestamp:2025-12-03 08:39:53.508848775 +0000 UTC m=+0.894825839 CPUVendorID:AuthenticAMD NumCores:8 NumPhysicalCores:1 NumSockets:8 CpuFrequency:2799998 MemoryCapacity:25199472640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:da5fb8e3-5a19-4f2d-831e-00b6f563dbea BootID:86dcb870-937a-4231-a14c-053b8b425329 Filesystems:[{Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:5039894528 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 
Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:12599738368 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:2519945216 Type:vfs Inodes:615221 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:3076107 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:12599734272 Type:vfs Inodes:3076107 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:429496729600 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:c4:a9:1d Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:c4:a9:1d Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:e0:42:2b Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:52:4f:0d Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:a3:9e:c2 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:11:18:c9 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:02:1c:65:9d:f8:a6 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:1a:70:70:35:6f:8f Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:25199472640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.510811 4576 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. 
Perf event counters are not available. Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.510988 4576 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.511689 4576 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.512025 4576 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.512089 4576 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.512780 4576 topology_manager.go:138] "Creating topology manager with none policy" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.512815 4576 container_manager_linux.go:303] "Creating device plugin manager" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.513159 4576 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.513264 4576 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.513715 4576 state_mem.go:36] "Initialized new in-memory state store" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.513877 4576 server.go:1245] "Using root directory" path="/var/lib/kubelet" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.515309 4576 kubelet.go:418] "Attempting to sync node with API server" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.515358 4576 kubelet.go:313] "Adding static pod 
path" path="/etc/kubernetes/manifests" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.515506 4576 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.515600 4576 kubelet.go:324] "Adding apiserver pod source" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.515649 4576 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.518698 4576 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.129.56.136:6443: connect: connection refused Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.518686 4576 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.129.56.136:6443: connect: connection refused Dec 03 08:39:53 crc kubenswrapper[4576]: E1203 08:39:53.518844 4576 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.129.56.136:6443: connect: connection refused" logger="UnhandledError" Dec 03 08:39:53 crc kubenswrapper[4576]: E1203 08:39:53.518870 4576 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.129.56.136:6443: connect: connection refused" logger="UnhandledError" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.518919 4576 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.519558 4576 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". 
Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.520409 4576 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.521463 4576 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.521501 4576 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.521512 4576 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.521537 4576 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.521562 4576 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.521574 4576 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.521585 4576 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.521601 4576 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.521614 4576 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.521627 4576 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.521662 4576 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.521672 4576 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.521959 4576 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.523039 4576 server.go:1280] "Started kubelet" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.523221 4576 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.136:6443: connect: connection refused Dec 03 08:39:53 crc systemd[1]: Started Kubernetes Kubelet. 
Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.523506 4576 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.529134 4576 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.530617 4576 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.533326 4576 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.534252 4576 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.534655 4576 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-28 18:28:45.797305709 +0000 UTC Dec 03 08:39:53 crc kubenswrapper[4576]: E1203 08:39:53.535142 4576 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.535468 4576 volume_manager.go:287] "The desired_state_of_world populator starts" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.535508 4576 volume_manager.go:289] "Starting Kubelet Volume Manager" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.536265 4576 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Dec 03 08:39:53 crc kubenswrapper[4576]: E1203 08:39:53.537735 4576 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.136:6443: connect: connection refused" interval="200ms" Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.538703 4576 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.129.56.136:6443: connect: connection refused Dec 03 08:39:53 crc kubenswrapper[4576]: E1203 08:39:53.538824 4576 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.129.56.136:6443: connect: connection refused" logger="UnhandledError" Dec 03 08:39:53 crc kubenswrapper[4576]: E1203 08:39:53.534427 4576 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.129.56.136:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187da7dc46e03f29 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-03 08:39:53.522847529 +0000 UTC m=+0.908824523,LastTimestamp:2025-12-03 08:39:53.522847529 +0000 UTC m=+0.908824523,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 03 08:39:53 crc kubenswrapper[4576]: 
I1203 08:39:53.541363 4576 server.go:460] "Adding debug handlers to kubelet server" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.554079 4576 factory.go:55] Registering systemd factory Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.554165 4576 factory.go:221] Registration of the systemd container factory successfully Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.554418 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.554491 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.554506 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.554544 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.554579 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.554606 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.554619 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.554633 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.554649 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.554662 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 
08:39:53.554687 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.554703 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.554715 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.554731 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.554742 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.554758 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.554771 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.554785 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.554799 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.554811 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.554827 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.554841 4576 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.554858 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.554872 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.554904 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.554922 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.554951 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.554968 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.554982 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.554998 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.555013 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.555049 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.555064 4576 reconstruct.go:130] "Volume is marked as 
uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.555078 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.555093 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.555108 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.555121 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.555150 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.555163 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.555179 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.555192 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.555208 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.555223 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.555237 4576 reconstruct.go:130] "Volume is marked as uncertain and 
added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.555248 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.555260 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.555274 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.555289 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.555302 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.555315 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.555329 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.555349 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.555376 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.555401 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.555423 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.555442 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.555459 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.555476 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.555491 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.555509 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.555547 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.556408 4576 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.556442 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.556458 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.556474 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.556499 4576 reconstruct.go:130] 
"Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.556559 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.556584 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.556604 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.556620 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.556634 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.556661 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.556675 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.556689 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.556702 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.556716 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.556728 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the 
actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.556741 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.556753 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.556768 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.556781 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.556795 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.556811 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.556825 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.556836 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.556849 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.556862 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.556877 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" 
volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.556890 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.556910 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.556923 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.556938 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.556950 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.556962 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.556976 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.556990 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557004 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557056 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557072 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" 
volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557086 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557097 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557109 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557123 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557138 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557154 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557175 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557190 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557206 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557220 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557234 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" 
volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557249 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557264 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557279 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557310 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557325 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557337 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557351 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557365 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557379 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557394 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557407 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" 
volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557420 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557435 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557451 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557463 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557475 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557488 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557502 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557516 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557554 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557570 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557601 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557625 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557646 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557663 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557683 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557702 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557721 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557737 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557750 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557765 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557779 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557793 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557807 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557820 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557836 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557852 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557866 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557881 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557894 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557908 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557924 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557937 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557950 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557963 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557980 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.557994 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558010 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558026 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558039 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558052 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558067 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558080 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558095 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558109 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558122 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558135 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558150 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558164 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558179 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558193 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558209 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558224 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558237 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558250 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558265 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" 
volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558279 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558293 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558306 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558318 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558332 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558348 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558366 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558385 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558402 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558419 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558439 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" 
volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558453 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558480 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558500 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558513 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558548 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558561 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558578 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558592 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558605 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558617 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558628 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" 
volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558644 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558659 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558673 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558686 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558699 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558713 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558725 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558739 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558752 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558764 4576 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558776 4576 reconstruct.go:97] "Volume reconstruction finished" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.558794 4576 reconciler.go:26] "Reconciler: start to 
sync state" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.561190 4576 factory.go:153] Registering CRI-O factory Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.561227 4576 factory.go:221] Registration of the crio container factory successfully Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.561348 4576 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.561406 4576 factory.go:103] Registering Raw factory Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.561445 4576 manager.go:1196] Started watching for new ooms in manager Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.566463 4576 manager.go:319] Starting recovery of all containers Dec 03 08:39:53 crc kubenswrapper[4576]: E1203 08:39:53.640326 4576 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.645710 4576 manager.go:324] Recovery completed Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.663791 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.667163 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.667229 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.667239 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.668770 4576 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.668800 4576 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.668846 4576 state_mem.go:36] "Initialized new in-memory state store" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.673303 4576 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.675800 4576 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.675880 4576 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.675936 4576 kubelet.go:2335] "Starting kubelet main sync loop" Dec 03 08:39:53 crc kubenswrapper[4576]: E1203 08:39:53.676028 4576 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 03 08:39:53 crc kubenswrapper[4576]: W1203 08:39:53.677314 4576 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.129.56.136:6443: connect: connection refused Dec 03 08:39:53 crc kubenswrapper[4576]: E1203 08:39:53.677428 4576 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.129.56.136:6443: connect: connection refused" logger="UnhandledError" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.678644 4576 policy_none.go:49] "None policy: Start" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.680155 4576 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.680192 4576 state_mem.go:35] "Initializing new in-memory state store" Dec 03 08:39:53 crc kubenswrapper[4576]: E1203 08:39:53.739281 4576 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.136:6443: connect: connection refused" interval="400ms" Dec 03 08:39:53 crc kubenswrapper[4576]: E1203 08:39:53.741681 4576 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.743750 4576 manager.go:334] "Starting Device Plugin manager" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.743807 4576 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.743828 4576 server.go:79] "Starting device plugin registration server" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.744279 4576 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.744312 4576 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.744772 4576 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.744857 4576 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.744867 4576 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 03 08:39:53 crc kubenswrapper[4576]: E1203 08:39:53.754639 4576 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.776367 4576 kubelet.go:2421] 
"SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.776561 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.778103 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.778129 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.778140 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.778331 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.778936 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.779025 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.779368 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.779404 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.779414 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.779570 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.779711 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.779743 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.780637 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.780672 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.780682 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.780966 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.781035 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.781055 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.781065 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.781453 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.781482 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.782027 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.782085 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.782102 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.782039 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.782160 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.782169 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.782206 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.782226 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.782235 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.782273 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.782513 4576 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.782585 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.783426 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.783455 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.783468 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.783640 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.783661 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.783705 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.783744 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.783772 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.784443 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.784471 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.784479 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.844516 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.845597 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.845635 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.845645 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.845670 4576 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 03 08:39:53 crc kubenswrapper[4576]: E1203 08:39:53.846322 4576 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.136:6443: connect: connection refused" node="crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.862493 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: 
\"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.862694 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.862872 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.862922 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.862955 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.862979 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.863002 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.863030 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.863050 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.863071 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: 
\"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.863094 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.863116 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.863137 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.863160 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.863189 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.964603 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.964770 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.964880 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.964781 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.965084 4576 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.964833 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.965283 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.965401 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.965427 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.965467 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.965494 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.965562 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.965627 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.965741 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " 
pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.965633 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.965899 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.965942 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.966091 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.966129 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.966269 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.966247 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.966358 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.966338 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.966447 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: 
\"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.966421 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.966555 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.966586 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.966671 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.966703 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 08:39:53 crc kubenswrapper[4576]: I1203 08:39:53.966788 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 08:39:54 crc kubenswrapper[4576]: I1203 08:39:54.047428 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:39:54 crc kubenswrapper[4576]: I1203 08:39:54.049742 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:39:54 crc kubenswrapper[4576]: I1203 08:39:54.049811 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:39:54 crc kubenswrapper[4576]: I1203 08:39:54.049831 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:39:54 crc kubenswrapper[4576]: I1203 08:39:54.049874 4576 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 03 08:39:54 crc kubenswrapper[4576]: E1203 08:39:54.050711 4576 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.136:6443: connect: connection refused" node="crc" Dec 03 08:39:54 crc kubenswrapper[4576]: I1203 08:39:54.107969 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 08:39:54 crc kubenswrapper[4576]: I1203 08:39:54.130822 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 03 08:39:54 crc kubenswrapper[4576]: W1203 08:39:54.140022 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-17fe565f83592165ce1ef34fe663a175583b9aff2db7b489bc2f28fba90cdeb1 WatchSource:0}: Error finding container 17fe565f83592165ce1ef34fe663a175583b9aff2db7b489bc2f28fba90cdeb1: Status 404 returned error can't find the container with id 17fe565f83592165ce1ef34fe663a175583b9aff2db7b489bc2f28fba90cdeb1 Dec 03 08:39:54 crc kubenswrapper[4576]: E1203 08:39:54.140310 4576 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.136:6443: connect: connection refused" interval="800ms" Dec 03 08:39:54 crc kubenswrapper[4576]: I1203 08:39:54.140477 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 08:39:54 crc kubenswrapper[4576]: I1203 08:39:54.146776 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 08:39:54 crc kubenswrapper[4576]: I1203 08:39:54.165395 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 08:39:54 crc kubenswrapper[4576]: W1203 08:39:54.171154 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-f6a38d15f6bf5bc02194418652550f97bb823196cbecdf18014b93bef3226ecd WatchSource:0}: Error finding container f6a38d15f6bf5bc02194418652550f97bb823196cbecdf18014b93bef3226ecd: Status 404 returned error can't find the container with id f6a38d15f6bf5bc02194418652550f97bb823196cbecdf18014b93bef3226ecd Dec 03 08:39:54 crc kubenswrapper[4576]: W1203 08:39:54.177022 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-ab121f0ce3313a9139aad057ad3c6e3f5a61308bbe68de943469a1946481567a WatchSource:0}: Error finding container ab121f0ce3313a9139aad057ad3c6e3f5a61308bbe68de943469a1946481567a: Status 404 returned error can't find the container with id ab121f0ce3313a9139aad057ad3c6e3f5a61308bbe68de943469a1946481567a Dec 03 08:39:54 crc kubenswrapper[4576]: W1203 08:39:54.250309 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-9740070b3d739bd38165cb6d12f132779f19b2b3baca5c15bdc85dce6b1613dd WatchSource:0}: Error finding container 9740070b3d739bd38165cb6d12f132779f19b2b3baca5c15bdc85dce6b1613dd: Status 404 returned error can't find the container with id 9740070b3d739bd38165cb6d12f132779f19b2b3baca5c15bdc85dce6b1613dd Dec 03 08:39:54 crc kubenswrapper[4576]: W1203 08:39:54.437768 4576 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get 
"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.129.56.136:6443: connect: connection refused Dec 03 08:39:54 crc kubenswrapper[4576]: E1203 08:39:54.437961 4576 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.129.56.136:6443: connect: connection refused" logger="UnhandledError" Dec 03 08:39:54 crc kubenswrapper[4576]: I1203 08:39:54.451485 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:39:54 crc kubenswrapper[4576]: I1203 08:39:54.453215 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:39:54 crc kubenswrapper[4576]: I1203 08:39:54.453264 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:39:54 crc kubenswrapper[4576]: I1203 08:39:54.453357 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:39:54 crc kubenswrapper[4576]: I1203 08:39:54.453412 4576 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 03 08:39:54 crc kubenswrapper[4576]: E1203 08:39:54.454046 4576 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.136:6443: connect: connection refused" node="crc" Dec 03 08:39:54 crc kubenswrapper[4576]: W1203 08:39:54.485410 4576 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.129.56.136:6443: connect: connection refused Dec 03 08:39:54 crc kubenswrapper[4576]: E1203 08:39:54.485519 4576 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.129.56.136:6443: connect: connection refused" logger="UnhandledError" Dec 03 08:39:54 crc kubenswrapper[4576]: I1203 08:39:54.531848 4576 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.136:6443: connect: connection refused Dec 03 08:39:54 crc kubenswrapper[4576]: I1203 08:39:54.535257 4576 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-15 00:04:43.589158748 +0000 UTC Dec 03 08:39:54 crc kubenswrapper[4576]: I1203 08:39:54.731466 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"ab121f0ce3313a9139aad057ad3c6e3f5a61308bbe68de943469a1946481567a"} Dec 03 08:39:54 crc kubenswrapper[4576]: I1203 08:39:54.734653 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" 
event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"058cd98ce4a653c95efde10871cfaec6b4e1ac05c01a43c4b18d1d5a822e6793"} Dec 03 08:39:54 crc kubenswrapper[4576]: I1203 08:39:54.734717 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"17fe565f83592165ce1ef34fe663a175583b9aff2db7b489bc2f28fba90cdeb1"} Dec 03 08:39:54 crc kubenswrapper[4576]: I1203 08:39:54.734900 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:39:54 crc kubenswrapper[4576]: I1203 08:39:54.736521 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:39:54 crc kubenswrapper[4576]: I1203 08:39:54.736560 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:39:54 crc kubenswrapper[4576]: I1203 08:39:54.736579 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:39:54 crc kubenswrapper[4576]: I1203 08:39:54.738223 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"4992dfdb73bd089a847a14022b609cad34fef18469532862b7b943400c22794d"} Dec 03 08:39:54 crc kubenswrapper[4576]: I1203 08:39:54.738267 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"9740070b3d739bd38165cb6d12f132779f19b2b3baca5c15bdc85dce6b1613dd"} Dec 03 08:39:54 crc kubenswrapper[4576]: I1203 08:39:54.738379 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:39:54 crc kubenswrapper[4576]: I1203 08:39:54.739662 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:39:54 crc kubenswrapper[4576]: I1203 08:39:54.739684 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:39:54 crc kubenswrapper[4576]: I1203 08:39:54.739692 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:39:54 crc kubenswrapper[4576]: I1203 08:39:54.741023 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb"} Dec 03 08:39:54 crc kubenswrapper[4576]: I1203 08:39:54.741052 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"cd592f8d2499cdc7d6c43f9856925972bbcf85a4111a46b52ddbe1740d87fdfd"} Dec 03 08:39:54 crc kubenswrapper[4576]: I1203 08:39:54.742452 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16"} Dec 03 08:39:54 crc kubenswrapper[4576]: I1203 08:39:54.742508 4576 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f6a38d15f6bf5bc02194418652550f97bb823196cbecdf18014b93bef3226ecd"} Dec 03 08:39:54 crc kubenswrapper[4576]: I1203 08:39:54.742661 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:39:54 crc kubenswrapper[4576]: I1203 08:39:54.743428 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:39:54 crc kubenswrapper[4576]: I1203 08:39:54.743453 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:39:54 crc kubenswrapper[4576]: I1203 08:39:54.743461 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:39:54 crc kubenswrapper[4576]: W1203 08:39:54.763301 4576 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.129.56.136:6443: connect: connection refused Dec 03 08:39:54 crc kubenswrapper[4576]: E1203 08:39:54.763492 4576 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.129.56.136:6443: connect: connection refused" logger="UnhandledError" Dec 03 08:39:54 crc kubenswrapper[4576]: W1203 08:39:54.819303 4576 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.129.56.136:6443: connect: connection refused Dec 03 08:39:54 crc kubenswrapper[4576]: E1203 08:39:54.819434 4576 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.129.56.136:6443: connect: connection refused" logger="UnhandledError" Dec 03 08:39:54 crc kubenswrapper[4576]: E1203 08:39:54.941387 4576 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.136:6443: connect: connection refused" interval="1.6s" Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 08:39:55.255207 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 08:39:55.257256 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 08:39:55.257320 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 08:39:55.257340 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 08:39:55.257410 4576 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 03 08:39:55 crc 
kubenswrapper[4576]: E1203 08:39:55.258250 4576 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.136:6443: connect: connection refused" node="crc" Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 08:39:55.526786 4576 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.136:6443: connect: connection refused Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 08:39:55.536231 4576 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 20:08:06.054904128 +0000 UTC Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 08:39:55.536355 4576 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 155h28m10.518553269s for next certificate rotation Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 08:39:55.596090 4576 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 03 08:39:55 crc kubenswrapper[4576]: E1203 08:39:55.598128 4576 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.129.56.136:6443: connect: connection refused" logger="UnhandledError" Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 08:39:55.747127 4576 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="058cd98ce4a653c95efde10871cfaec6b4e1ac05c01a43c4b18d1d5a822e6793" exitCode=0 Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 08:39:55.747321 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"058cd98ce4a653c95efde10871cfaec6b4e1ac05c01a43c4b18d1d5a822e6793"} Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 08:39:55.747942 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 08:39:55.749417 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"4992dfdb73bd089a847a14022b609cad34fef18469532862b7b943400c22794d"} Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 08:39:55.749367 4576 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="4992dfdb73bd089a847a14022b609cad34fef18469532862b7b943400c22794d" exitCode=0 Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 08:39:55.749519 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 08:39:55.749588 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 08:39:55.749608 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 08:39:55.749818 4576 kubelet_node_status.go:401] "Setting node 
annotation to enable volume controller attach/detach" Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 08:39:55.751797 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 08:39:55.751859 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 08:39:55.751886 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 08:39:55.752385 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290805e286eda8aff"} Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 08:39:55.752419 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 08:39:55.752425 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4"} Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 08:39:55.752542 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da"} Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 08:39:55.753318 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 08:39:55.753342 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 08:39:55.753351 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 08:39:55.755117 4576 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16" exitCode=0 Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 08:39:55.755180 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16"} Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 08:39:55.755301 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 08:39:55.756047 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 08:39:55.756073 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 08:39:55.756081 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 
08:39:55.756841 4576 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f" exitCode=0 Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 08:39:55.756871 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f"} Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 08:39:55.756975 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 08:39:55.757716 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 08:39:55.757760 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 08:39:55.757779 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 08:39:55.763394 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 08:39:55.764164 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 08:39:55.764194 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:39:55 crc kubenswrapper[4576]: I1203 08:39:55.764202 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:39:56 crc kubenswrapper[4576]: I1203 08:39:56.526427 4576 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.136:6443: connect: connection refused Dec 03 08:39:56 crc kubenswrapper[4576]: E1203 08:39:56.544775 4576 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.136:6443: connect: connection refused" interval="3.2s" Dec 03 08:39:56 crc kubenswrapper[4576]: W1203 08:39:56.577798 4576 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.129.56.136:6443: connect: connection refused Dec 03 08:39:56 crc kubenswrapper[4576]: E1203 08:39:56.577904 4576 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.129.56.136:6443: connect: connection refused" logger="UnhandledError" Dec 03 08:39:56 crc kubenswrapper[4576]: E1203 08:39:56.757699 4576 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.129.56.136:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187da7dc46e03f29 default 0 0001-01-01 
00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-03 08:39:53.522847529 +0000 UTC m=+0.908824523,LastTimestamp:2025-12-03 08:39:53.522847529 +0000 UTC m=+0.908824523,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 03 08:39:56 crc kubenswrapper[4576]: I1203 08:39:56.762716 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6"} Dec 03 08:39:56 crc kubenswrapper[4576]: I1203 08:39:56.762798 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b"} Dec 03 08:39:56 crc kubenswrapper[4576]: I1203 08:39:56.762822 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e"} Dec 03 08:39:56 crc kubenswrapper[4576]: I1203 08:39:56.764896 4576 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4" exitCode=0 Dec 03 08:39:56 crc kubenswrapper[4576]: I1203 08:39:56.764961 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4"} Dec 03 08:39:56 crc kubenswrapper[4576]: I1203 08:39:56.765128 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:39:56 crc kubenswrapper[4576]: I1203 08:39:56.766243 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:39:56 crc kubenswrapper[4576]: I1203 08:39:56.766266 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:39:56 crc kubenswrapper[4576]: I1203 08:39:56.766275 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:39:56 crc kubenswrapper[4576]: I1203 08:39:56.768013 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"72b5f4e5220da313486780f8e36e04729be9d90668c20543e75fa00b76d947cc"} Dec 03 08:39:56 crc kubenswrapper[4576]: I1203 08:39:56.768084 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:39:56 crc kubenswrapper[4576]: I1203 08:39:56.776744 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:39:56 crc kubenswrapper[4576]: I1203 08:39:56.776766 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 
08:39:56 crc kubenswrapper[4576]: I1203 08:39:56.776776 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:39:56 crc kubenswrapper[4576]: I1203 08:39:56.779056 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:39:56 crc kubenswrapper[4576]: I1203 08:39:56.779394 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"1f74d1ddba25f70ad4e5966802bf04babfc9035636f70bae24c2b97ebacb4bda"} Dec 03 08:39:56 crc kubenswrapper[4576]: I1203 08:39:56.779419 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"bf315b6a16cc730b3f5b6c98397a15b8542f4ea7812285c4f9aee06bab125942"} Dec 03 08:39:56 crc kubenswrapper[4576]: I1203 08:39:56.779430 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"89498b02a40a022c6a40707b5c5e4fa9c94e57d4c7a78ab9687c3e55b3860c26"} Dec 03 08:39:56 crc kubenswrapper[4576]: I1203 08:39:56.779500 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:39:56 crc kubenswrapper[4576]: I1203 08:39:56.780047 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:39:56 crc kubenswrapper[4576]: I1203 08:39:56.780072 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:39:56 crc kubenswrapper[4576]: I1203 08:39:56.780082 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:39:56 crc kubenswrapper[4576]: I1203 08:39:56.780671 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:39:56 crc kubenswrapper[4576]: I1203 08:39:56.780715 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:39:56 crc kubenswrapper[4576]: I1203 08:39:56.780725 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:39:56 crc kubenswrapper[4576]: I1203 08:39:56.850172 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 08:39:56 crc kubenswrapper[4576]: I1203 08:39:56.858242 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 08:39:56 crc kubenswrapper[4576]: I1203 08:39:56.858668 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:39:56 crc kubenswrapper[4576]: I1203 08:39:56.859800 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:39:56 crc kubenswrapper[4576]: I1203 08:39:56.859841 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:39:56 crc kubenswrapper[4576]: I1203 08:39:56.859855 4576 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasSufficientPID" Dec 03 08:39:56 crc kubenswrapper[4576]: I1203 08:39:56.859884 4576 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 03 08:39:56 crc kubenswrapper[4576]: E1203 08:39:56.860318 4576 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.136:6443: connect: connection refused" node="crc" Dec 03 08:39:57 crc kubenswrapper[4576]: W1203 08:39:57.030076 4576 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.129.56.136:6443: connect: connection refused Dec 03 08:39:57 crc kubenswrapper[4576]: E1203 08:39:57.030452 4576 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.129.56.136:6443: connect: connection refused" logger="UnhandledError" Dec 03 08:39:57 crc kubenswrapper[4576]: I1203 08:39:57.784953 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706"} Dec 03 08:39:57 crc kubenswrapper[4576]: I1203 08:39:57.784999 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce"} Dec 03 08:39:57 crc kubenswrapper[4576]: I1203 08:39:57.785097 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:39:57 crc kubenswrapper[4576]: I1203 08:39:57.788233 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:39:57 crc kubenswrapper[4576]: I1203 08:39:57.788284 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:39:57 crc kubenswrapper[4576]: I1203 08:39:57.788302 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:39:57 crc kubenswrapper[4576]: I1203 08:39:57.789681 4576 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9" exitCode=0 Dec 03 08:39:57 crc kubenswrapper[4576]: I1203 08:39:57.789713 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9"} Dec 03 08:39:57 crc kubenswrapper[4576]: I1203 08:39:57.790034 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:39:57 crc kubenswrapper[4576]: I1203 08:39:57.790091 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:39:57 crc kubenswrapper[4576]: I1203 08:39:57.791314 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:39:57 
crc kubenswrapper[4576]: I1203 08:39:57.791374 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:39:57 crc kubenswrapper[4576]: I1203 08:39:57.791400 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:39:57 crc kubenswrapper[4576]: I1203 08:39:57.791639 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:39:57 crc kubenswrapper[4576]: I1203 08:39:57.791735 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:39:57 crc kubenswrapper[4576]: I1203 08:39:57.791824 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:39:58 crc kubenswrapper[4576]: I1203 08:39:58.796967 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"2fdc3c90dc45378285ef9d57dceea95152497f0aefe12991a47354d11a0c838b"} Dec 03 08:39:58 crc kubenswrapper[4576]: I1203 08:39:58.797032 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"0a3dc0695bda2cc75c2ccf82050ca47b54244a19319c741a2368e448f635ee3d"} Dec 03 08:39:58 crc kubenswrapper[4576]: I1203 08:39:58.797052 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"ac6d69cbfb86f4b717a66b46125d5d530049473a1f8b21e3be0d85c37ba5b837"} Dec 03 08:39:58 crc kubenswrapper[4576]: I1203 08:39:58.797071 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"88d9b58084ebfe06075e53fe77aec63e6c4e338aec6e1f31998d4bfcaca7ba77"} Dec 03 08:39:58 crc kubenswrapper[4576]: I1203 08:39:58.797083 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:39:58 crc kubenswrapper[4576]: I1203 08:39:58.797096 4576 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 08:39:58 crc kubenswrapper[4576]: I1203 08:39:58.797139 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:39:58 crc kubenswrapper[4576]: I1203 08:39:58.797089 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"6b49f6c1bad412759cfa18b56387ca168d26a69640f1c15c1b82040b73531bf3"} Dec 03 08:39:58 crc kubenswrapper[4576]: I1203 08:39:58.797304 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:39:58 crc kubenswrapper[4576]: I1203 08:39:58.797409 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 08:39:58 crc kubenswrapper[4576]: I1203 08:39:58.798166 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:39:58 crc kubenswrapper[4576]: I1203 08:39:58.798204 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:39:58 crc kubenswrapper[4576]: I1203 08:39:58.798221 4576 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:39:58 crc kubenswrapper[4576]: I1203 08:39:58.798373 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:39:58 crc kubenswrapper[4576]: I1203 08:39:58.798416 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:39:58 crc kubenswrapper[4576]: I1203 08:39:58.798436 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:39:58 crc kubenswrapper[4576]: I1203 08:39:58.799060 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:39:58 crc kubenswrapper[4576]: I1203 08:39:58.799094 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:39:58 crc kubenswrapper[4576]: I1203 08:39:58.799109 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:39:59 crc kubenswrapper[4576]: I1203 08:39:59.092298 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 08:39:59 crc kubenswrapper[4576]: I1203 08:39:59.757410 4576 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 03 08:39:59 crc kubenswrapper[4576]: I1203 08:39:59.800430 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:39:59 crc kubenswrapper[4576]: I1203 08:39:59.800496 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:39:59 crc kubenswrapper[4576]: I1203 08:39:59.800433 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:39:59 crc kubenswrapper[4576]: I1203 08:39:59.801933 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:39:59 crc kubenswrapper[4576]: I1203 08:39:59.801981 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:39:59 crc kubenswrapper[4576]: I1203 08:39:59.802004 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:39:59 crc kubenswrapper[4576]: I1203 08:39:59.802496 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:39:59 crc kubenswrapper[4576]: I1203 08:39:59.802556 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:39:59 crc kubenswrapper[4576]: I1203 08:39:59.802519 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:39:59 crc kubenswrapper[4576]: I1203 08:39:59.802672 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:39:59 crc kubenswrapper[4576]: I1203 08:39:59.802725 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:39:59 crc kubenswrapper[4576]: I1203 08:39:59.802571 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 
08:40:00 crc kubenswrapper[4576]: I1203 08:40:00.061270 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:40:00 crc kubenswrapper[4576]: I1203 08:40:00.062816 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:00 crc kubenswrapper[4576]: I1203 08:40:00.062861 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:00 crc kubenswrapper[4576]: I1203 08:40:00.062876 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:00 crc kubenswrapper[4576]: I1203 08:40:00.062904 4576 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 03 08:40:00 crc kubenswrapper[4576]: I1203 08:40:00.244956 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 08:40:00 crc kubenswrapper[4576]: I1203 08:40:00.289135 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 03 08:40:00 crc kubenswrapper[4576]: I1203 08:40:00.803500 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:40:00 crc kubenswrapper[4576]: I1203 08:40:00.803753 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:40:00 crc kubenswrapper[4576]: I1203 08:40:00.806973 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:00 crc kubenswrapper[4576]: I1203 08:40:00.807018 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:00 crc kubenswrapper[4576]: I1203 08:40:00.807035 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:00 crc kubenswrapper[4576]: I1203 08:40:00.807806 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:00 crc kubenswrapper[4576]: I1203 08:40:00.807892 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:00 crc kubenswrapper[4576]: I1203 08:40:00.807921 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:01 crc kubenswrapper[4576]: I1203 08:40:01.141735 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 08:40:01 crc kubenswrapper[4576]: I1203 08:40:01.141922 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:40:01 crc kubenswrapper[4576]: I1203 08:40:01.142981 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:01 crc kubenswrapper[4576]: I1203 08:40:01.143042 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:01 crc kubenswrapper[4576]: I1203 08:40:01.143052 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:01 crc kubenswrapper[4576]: I1203 08:40:01.728882 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 08:40:01 crc kubenswrapper[4576]: I1203 08:40:01.729197 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:40:01 crc kubenswrapper[4576]: I1203 08:40:01.731480 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:01 crc kubenswrapper[4576]: I1203 08:40:01.731557 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:01 crc kubenswrapper[4576]: I1203 08:40:01.731571 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:01 crc kubenswrapper[4576]: I1203 08:40:01.864672 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 08:40:01 crc kubenswrapper[4576]: I1203 08:40:01.864896 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:40:01 crc kubenswrapper[4576]: I1203 08:40:01.866033 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:01 crc kubenswrapper[4576]: I1203 08:40:01.866106 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:01 crc kubenswrapper[4576]: I1203 08:40:01.866131 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:03 crc kubenswrapper[4576]: E1203 08:40:03.754836 4576 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 03 08:40:04 crc kubenswrapper[4576]: I1203 08:40:04.923223 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 08:40:04 crc kubenswrapper[4576]: I1203 08:40:04.923485 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:40:04 crc kubenswrapper[4576]: I1203 08:40:04.925134 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:04 crc kubenswrapper[4576]: I1203 08:40:04.925201 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:04 crc kubenswrapper[4576]: I1203 08:40:04.925222 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:04 crc kubenswrapper[4576]: I1203 08:40:04.929395 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 08:40:05 crc kubenswrapper[4576]: I1203 08:40:05.815559 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:40:05 crc kubenswrapper[4576]: I1203 08:40:05.816661 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:05 crc kubenswrapper[4576]: I1203 08:40:05.816696 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:05 crc kubenswrapper[4576]: I1203 08:40:05.816708 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 03 08:40:06 crc kubenswrapper[4576]: I1203 08:40:06.857412 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Dec 03 08:40:06 crc kubenswrapper[4576]: I1203 08:40:06.857649 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:40:06 crc kubenswrapper[4576]: I1203 08:40:06.858936 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:06 crc kubenswrapper[4576]: I1203 08:40:06.858979 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:06 crc kubenswrapper[4576]: I1203 08:40:06.858988 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:07 crc kubenswrapper[4576]: I1203 08:40:07.527132 4576 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Dec 03 08:40:07 crc kubenswrapper[4576]: W1203 08:40:07.527162 4576 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 03 08:40:07 crc kubenswrapper[4576]: I1203 08:40:07.527260 4576 trace.go:236] Trace[1561769008]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Dec-2025 08:39:57.526) (total time: 10000ms): Dec 03 08:40:07 crc kubenswrapper[4576]: Trace[1561769008]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10000ms (08:40:07.527) Dec 03 08:40:07 crc kubenswrapper[4576]: Trace[1561769008]: [10.000909291s] [10.000909291s] END Dec 03 08:40:07 crc kubenswrapper[4576]: E1203 08:40:07.527290 4576 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 03 08:40:07 crc kubenswrapper[4576]: W1203 08:40:07.681699 4576 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 03 08:40:07 crc kubenswrapper[4576]: I1203 08:40:07.681811 4576 trace.go:236] Trace[967231242]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Dec-2025 08:39:57.679) (total time: 10001ms): Dec 03 08:40:07 crc kubenswrapper[4576]: Trace[967231242]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (08:40:07.681) Dec 03 08:40:07 crc kubenswrapper[4576]: Trace[967231242]: [10.001877071s] [10.001877071s] END Dec 03 08:40:07 crc kubenswrapper[4576]: E1203 08:40:07.681836 4576 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get 
\"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 03 08:40:07 crc kubenswrapper[4576]: I1203 08:40:07.924264 4576 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 03 08:40:07 crc kubenswrapper[4576]: I1203 08:40:07.924444 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 03 08:40:09 crc kubenswrapper[4576]: I1203 08:40:09.169348 4576 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 03 08:40:09 crc kubenswrapper[4576]: I1203 08:40:09.169740 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 03 08:40:09 crc kubenswrapper[4576]: I1203 08:40:09.177229 4576 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 03 08:40:09 crc kubenswrapper[4576]: I1203 08:40:09.177314 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 03 08:40:11 crc kubenswrapper[4576]: I1203 08:40:11.739691 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 08:40:11 crc kubenswrapper[4576]: I1203 08:40:11.739917 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:40:11 crc kubenswrapper[4576]: I1203 08:40:11.742286 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:11 crc kubenswrapper[4576]: I1203 08:40:11.742592 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:11 crc kubenswrapper[4576]: I1203 08:40:11.742839 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:11 crc kubenswrapper[4576]: I1203 08:40:11.745366 4576 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 08:40:11 crc kubenswrapper[4576]: I1203 08:40:11.830688 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:40:11 crc kubenswrapper[4576]: I1203 08:40:11.831939 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:11 crc kubenswrapper[4576]: I1203 08:40:11.832002 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:11 crc kubenswrapper[4576]: I1203 08:40:11.832027 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:11 crc kubenswrapper[4576]: I1203 08:40:11.929948 4576 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 03 08:40:13 crc kubenswrapper[4576]: E1203 08:40:13.781048 4576 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 03 08:40:13 crc kubenswrapper[4576]: I1203 08:40:13.965014 4576 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 03 08:40:14 crc kubenswrapper[4576]: E1203 08:40:14.169569 4576 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.172563 4576 trace.go:236] Trace[1854869270]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Dec-2025 08:40:01.694) (total time: 12477ms): Dec 03 08:40:14 crc kubenswrapper[4576]: Trace[1854869270]: ---"Objects listed" error: 12477ms (08:40:14.172) Dec 03 08:40:14 crc kubenswrapper[4576]: Trace[1854869270]: [12.477648648s] [12.477648648s] END Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.172593 4576 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.173048 4576 trace.go:236] Trace[1949080629]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Dec-2025 08:40:01.403) (total time: 12769ms): Dec 03 08:40:14 crc kubenswrapper[4576]: Trace[1949080629]: ---"Objects listed" error: 12769ms (08:40:14.172) Dec 03 08:40:14 crc kubenswrapper[4576]: Trace[1949080629]: [12.769278046s] [12.769278046s] END Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.173067 4576 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 03 08:40:14 crc kubenswrapper[4576]: E1203 08:40:14.174099 4576 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.174934 4576 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.177920 4576 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.217628 4576 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints 
namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:57494->192.168.126.11:17697: read: connection reset by peer" start-of-body= Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.217886 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:57494->192.168.126.11:17697: read: connection reset by peer" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.218617 4576 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.218746 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.674039 4576 apiserver.go:52] "Watching apiserver" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.682897 4576 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.683426 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf"] Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.684473 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:40:14 crc kubenswrapper[4576]: E1203 08:40:14.684591 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.684710 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.685262 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:40:14 crc kubenswrapper[4576]: E1203 08:40:14.685298 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.685680 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:40:14 crc kubenswrapper[4576]: E1203 08:40:14.685766 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.685858 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.685869 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.712577 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.713397 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.713403 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.713809 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.713910 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.713978 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.714098 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.714980 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.716384 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.737028 4576 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.778962 
4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.834986 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.835027 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.835046 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.835068 4576 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.835090 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.835104 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.835136 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.835150 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.835173 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.835203 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.835222 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.835239 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.835259 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.835284 4576 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.835319 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.835344 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.835362 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.835383 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.835409 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.835428 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.835447 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.835464 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.835480 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.835495 4576 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.835514 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.835532 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.835567 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.835585 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.835605 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.835641 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.835657 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.835680 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.835707 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 03 08:40:14 crc 
kubenswrapper[4576]: I1203 08:40:14.835723 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.835758 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.835780 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.835740 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.835961 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.835989 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.836018 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.836049 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.836078 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.836064 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.836110 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.836163 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.836181 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.836217 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.836246 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.836268 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.836297 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.836339 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.836366 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: 
\"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.836369 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.836392 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.836416 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.836594 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.836654 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.836684 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.836713 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.836742 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.836736 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.836792 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.836821 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.836825 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.836846 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.836870 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.836901 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.836970 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.836981 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.836991 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.837021 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.837039 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.837059 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.837075 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.837092 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.837084 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.837110 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.837148 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.837193 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.837368 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.837447 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.837643 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.837780 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.837802 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.837918 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.837999 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.838032 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.838182 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.838211 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.838224 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.838224 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.838465 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.838591 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.838659 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.838807 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.838839 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.838909 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.839075 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.839097 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.839260 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.839276 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.839295 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.839365 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.839432 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.839451 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.839477 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.839547 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.839598 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.839675 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.839714 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.840094 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.840109 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.840269 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.840518 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.842020 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.841281 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.840557 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.840700 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.840968 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.841073 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.842122 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.841345 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.841439 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.841459 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: E1203 08:40:14.841468 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:40:15.341393199 +0000 UTC m=+22.727370193 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.842217 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.842248 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.842270 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.842286 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.842305 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.842624 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 
08:40:14.842660 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.842711 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.842740 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.842789 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.842818 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.842876 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.842898 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.842918 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.842959 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.842982 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 03 08:40:14 
crc kubenswrapper[4576]: I1203 08:40:14.843022 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.843046 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.843067 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.843105 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.843127 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.843146 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.843184 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.843205 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.843228 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.843265 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 03 08:40:14 crc 
kubenswrapper[4576]: I1203 08:40:14.843287 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.843309 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.843348 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.843369 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.843389 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.843441 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.843463 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.843503 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.843561 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.843586 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: 
\"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.843605 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.843649 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.843673 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.843696 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.843742 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.843764 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.843895 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.843924 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.842554 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.842600 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.841973 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.844199 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.844232 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.844333 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.844362 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.844494 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.844958 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.844992 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.845044 4576 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.845127 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.845154 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.845397 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.846717 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.849871 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.850950 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.851378 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.851580 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.851872 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.852070 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.853042 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.841949 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.857163 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.857339 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.857654 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.857649 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.857709 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.857738 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.857760 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.857782 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.857802 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.857827 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.857845 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.857873 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.857873 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.857889 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.857908 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.857926 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.857943 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.857965 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.857985 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858003 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858019 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858034 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858051 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod 
\"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858076 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858091 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858096 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858109 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858126 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858149 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858164 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858180 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858208 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 03 08:40:14 crc 
kubenswrapper[4576]: I1203 08:40:14.858225 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858241 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858257 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858272 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858294 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858310 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858324 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858341 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858361 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858382 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 03 08:40:14 crc kubenswrapper[4576]: 
I1203 08:40:14.858387 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858414 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858431 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858448 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858463 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858477 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858495 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858511 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858530 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858569 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: 
\"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858587 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858602 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858618 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858694 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858725 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858766 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858814 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858835 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858866 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858893 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858922 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858948 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858965 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.858973 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.859008 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.859032 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.859055 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.859074 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.859094 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.859114 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.859135 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.859158 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.859183 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: 
\"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.859205 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.859230 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.859254 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.859281 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.859306 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.859331 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.859352 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.859375 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.859468 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.859494 
4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.859519 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.859564 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.859591 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.859613 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.859634 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.859679 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.859709 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.859715 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.859757 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.859765 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.859815 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.859847 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.859881 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.859907 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.860000 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.860146 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.860168 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.860182 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.860183 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.860197 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.860211 4576 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.860226 4576 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.860238 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.860252 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.860265 4576 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.860279 4576 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.860291 4576 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: 
I1203 08:40:14.860304 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.860317 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.860329 4576 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.860343 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.860371 4576 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.860383 4576 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.860395 4576 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.860410 4576 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.860425 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.860438 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.860451 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.860464 4576 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.860478 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.860483 4576 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.860491 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.860528 4576 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.860986 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.860994 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.861275 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.861295 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.861338 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.861579 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.861718 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.862397 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.864349 4576 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.864439 4576 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.864455 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.864465 4576 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.864476 4576 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.864488 4576 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.864502 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.864520 4576 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.864534 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.864595 4576 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.864606 4576 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.864615 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.864633 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.864647 4576 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.864659 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.864671 4576 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.864696 4576 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.864718 4576 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.864738 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.864750 4576 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.864762 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.864774 4576 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.864785 4576 reconciler_common.go:293] "Volume detached for volume 
\"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.864794 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.864803 4576 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.864811 4576 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.864822 4576 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.864832 4576 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.864841 4576 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.864852 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.864862 4576 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.864871 4576 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.864880 4576 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.864909 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.864919 4576 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.864929 4576 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: 
\"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.864940 4576 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.864950 4576 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.864959 4576 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.864968 4576 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.864976 4576 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.864985 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.864999 4576 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.870184 4576 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706" exitCode=255 Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.870379 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706"} Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.865039 4576 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.916206 4576 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.916242 4576 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.916262 4576 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.916277 4576 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.916305 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.916333 4576 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.916914 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.917948 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.918472 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.918525 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.918858 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.919167 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). 
InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.919327 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.919624 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.919966 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.920019 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.920350 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.920564 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.920682 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: E1203 08:40:14.921604 4576 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 08:40:14 crc kubenswrapper[4576]: E1203 08:40:14.921649 4576 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 08:40:14 crc kubenswrapper[4576]: E1203 08:40:14.921680 4576 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 08:40:14 crc kubenswrapper[4576]: E1203 08:40:14.921798 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 08:40:15.421769602 +0000 UTC m=+22.807746586 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.922042 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.922346 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.922530 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.922752 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.922935 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.923009 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.923244 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.923440 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.923030 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.928256 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.928447 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.928666 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.928921 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.929147 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.929369 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.929600 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.929774 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.929943 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.930589 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.930856 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.931159 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.932360 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.932378 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.932822 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.934306 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.934610 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.934817 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.934857 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.937583 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.939325 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.939373 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.939677 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.940370 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.940632 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.940836 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.941048 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.941297 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.941472 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.942261 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.942313 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.942471 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.942531 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.942750 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.942943 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.943102 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.943384 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.943664 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.944630 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). 
InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.946531 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.946777 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.947025 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.947229 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.947425 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.948364 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.948614 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.948633 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.949633 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.950026 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.954275 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.955590 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.955881 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.955977 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.956257 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). 
InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.956492 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.956746 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.956945 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.956969 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.957144 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.957681 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.957743 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.957824 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.957977 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.958216 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.958600 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.958615 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.958654 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.959058 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: E1203 08:40:14.959060 4576 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 08:40:14 crc kubenswrapper[4576]: E1203 08:40:14.968266 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-12-03 08:40:15.468232126 +0000 UTC m=+22.854209110 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.957484 4576 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.969907 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.970099 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.959515 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.958894 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.959717 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 08:40:14 crc kubenswrapper[4576]: E1203 08:40:14.959180 4576 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 08:40:14 crc kubenswrapper[4576]: E1203 08:40:14.970370 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 08:40:15.470348661 +0000 UTC m=+22.856325645 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.959474 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.959323 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.971961 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 03 08:40:14 crc kubenswrapper[4576]: I1203 08:40:14.972016 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.001573 4576 scope.go:117] "RemoveContainer" containerID="7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.002455 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.005362 4576 csr.go:261] certificate signing request csr-fjj59 is approved, waiting to be issued Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.005757 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.006194 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.006564 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.006738 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.011606 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.012732 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.014784 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.017443 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.019793 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.019907 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.019984 4576 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020006 4576 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020019 
4576 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020031 4576 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020045 4576 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020057 4576 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020069 4576 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020082 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020093 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020115 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020128 4576 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020140 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020153 4576 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020165 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020178 4576 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 
08:40:15.020190 4576 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020207 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020219 4576 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020231 4576 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020244 4576 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020256 4576 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020267 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020281 4576 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020293 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020304 4576 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020316 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020330 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020342 4576 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath 
\"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020364 4576 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020377 4576 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020406 4576 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020424 4576 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020437 4576 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020451 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020464 4576 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020476 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020488 4576 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020501 4576 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020513 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020528 4576 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020626 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: 
\"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020646 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020668 4576 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020681 4576 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020696 4576 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020733 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020745 4576 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020756 4576 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020768 4576 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020779 4576 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020800 4576 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020813 4576 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020823 4576 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020835 4576 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node 
\"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020848 4576 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020859 4576 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020872 4576 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020885 4576 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020898 4576 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020909 4576 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020921 4576 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020932 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020945 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020956 4576 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020968 4576 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020981 4576 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.020992 4576 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 
crc kubenswrapper[4576]: I1203 08:40:15.021003 4576 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021016 4576 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021027 4576 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021047 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021058 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021070 4576 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021080 4576 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021091 4576 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021103 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021116 4576 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021126 4576 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021137 4576 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021149 4576 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021161 4576 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021172 4576 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021184 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021194 4576 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021207 4576 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021217 4576 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021229 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021242 4576 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021253 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021265 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021276 4576 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021287 4576 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021298 4576 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021309 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021320 4576 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021332 4576 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021355 4576 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021371 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021383 4576 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021394 4576 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021404 4576 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021416 4576 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021428 4576 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021440 4576 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021451 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021462 4576 reconciler_common.go:293] "Volume detached for volume 
\"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021472 4576 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021483 4576 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021493 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021504 4576 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021514 4576 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021528 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021539 4576 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021573 4576 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021851 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.021905 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.022962 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:40:15 crc kubenswrapper[4576]: E1203 08:40:15.023092 4576 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 08:40:15 crc kubenswrapper[4576]: E1203 08:40:15.023109 4576 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 08:40:15 crc kubenswrapper[4576]: E1203 08:40:15.023123 4576 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.024349 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.025065 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 08:40:15 crc kubenswrapper[4576]: E1203 08:40:15.025190 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 08:40:15.52315728 +0000 UTC m=+22.909134344 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.028648 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.036188 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.042220 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.046898 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.048672 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.050514 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.057089 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.061119 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.093305 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.120123 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.122709 4576 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.122740 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.122752 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.122763 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.134994 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.135341 4576 csr.go:257] certificate signing request csr-fjj59 is issued Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.153441 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.248144 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.277869 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.300000 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.314689 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.321642 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.346080 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"ima
geID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.382859 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.475010 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.475084 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.475117 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.475143 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:40:15 crc kubenswrapper[4576]: E1203 08:40:15.475263 4576 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 08:40:15 crc kubenswrapper[4576]: E1203 08:40:15.475359 4576 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 08:40:16.475333859 +0000 UTC m=+23.861310833 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 08:40:15 crc kubenswrapper[4576]: E1203 08:40:15.475449 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:40:16.475442682 +0000 UTC m=+23.861419666 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:40:15 crc kubenswrapper[4576]: E1203 08:40:15.475568 4576 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 08:40:15 crc kubenswrapper[4576]: E1203 08:40:15.475581 4576 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 08:40:15 crc kubenswrapper[4576]: E1203 08:40:15.475602 4576 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 08:40:15 crc kubenswrapper[4576]: E1203 08:40:15.475626 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 08:40:16.475619056 +0000 UTC m=+23.861596040 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 08:40:15 crc kubenswrapper[4576]: E1203 08:40:15.475679 4576 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 08:40:15 crc kubenswrapper[4576]: E1203 08:40:15.475702 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 08:40:16.475696568 +0000 UTC m=+23.861673552 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.500779 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.527155 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.546862 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 08:40:15 crc kubenswrapper[4576]: E1203 08:40:15.615132 4576 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 08:40:15 crc kubenswrapper[4576]: E1203 08:40:15.615162 4576 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 08:40:15 crc kubenswrapper[4576]: E1203 08:40:15.615179 4576 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 08:40:15 crc kubenswrapper[4576]: E1203 08:40:15.615233 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 08:40:16.615218245 +0000 UTC m=+24.001195239 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.575691 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.682923 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.683607 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.685685 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.686696 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.687436 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.688174 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.689324 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.690048 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.691572 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.692236 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.693187 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" 
path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.693885 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.694769 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.695705 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.696383 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.697443 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.698684 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.699507 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.700250 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.701133 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.765867 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.766879 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.767848 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.768564 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.769542 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" 
path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.770339 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.771686 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.772268 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.772996 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.774061 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.774771 4576 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.774903 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.777810 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.778632 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.779444 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.782228 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.783196 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.783844 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.785207 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" 
path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.786814 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.787685 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.788588 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.789889 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.791306 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.792050 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.793261 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.801062 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.802112 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.803308 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.803812 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.804279 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.805429 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.806111 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" 
path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.806738 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.877954 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"52333a05f039ec84c87b0170524958d01fbb7688666f98dd54700a6398ce3683"} Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.878014 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"aebc8ad7e5eccf68e64ae4540c41425cff8a7a7aa139e97c1b02caeb7b6bc7fc"} Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.878236 4576 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.881685 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.883348 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf"} Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.884468 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.885982 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"670bac7f495f58c9bc683d7f19de0195fc8565a1d205329a72f7b0cb7642768d"} Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.887606 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"7487354b977692f3cf1a8c2c9ede2bafac976404219c033a5c58f7695f039aa3"} Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.887643 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"6b7973d2889f325d9622b3e08d8273309183aa85fb23186099ca5f57b1256733"} Dec 03 08:40:15 crc kubenswrapper[4576]: E1203 08:40:15.924502 4576 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.927440 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.962968 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.975474 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90118b09-8d4e-4eb5-831f-56e13fa31009\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290805e286eda8aff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 08:40:15 crc kubenswrapper[4576]: I1203 08:40:15.998003 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.018277 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.040648 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.081764 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with 
unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.158256 4576 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-12-03 08:35:15 +0000 UTC, rotation deadline is 2026-10-16 03:11:47.348228004 +0000 UTC Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.158304 4576 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 7602h31m31.189927106s for next certificate rotation Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.293429 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.321510 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-q8kww"] Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.322514 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-jbxx2"] Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.322809 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.322809 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-q8kww" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.326794 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.326829 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.326961 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.330664 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.330746 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.332232 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-pjb2d"] Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.332738 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.334693 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-6qf5t"] Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.334982 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-6qf5t" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.337815 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.337840 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.337991 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.338109 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.338213 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.338393 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.338336 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.338513 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.338576 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.338612 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.349730 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.370142 4576 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.370545 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/72bb738c-5ba1-4104-8729-1a929fa6d2ba-system-cni-dir\") pod \"multus-additional-cni-plugins-q8kww\" (UID: \"72bb738c-5ba1-4104-8729-1a929fa6d2ba\") " pod="openshift-multus/multus-additional-cni-plugins-q8kww" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.370571 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-multus-daemon-config\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.370588 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/72bb738c-5ba1-4104-8729-1a929fa6d2ba-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-q8kww\" (UID: \"72bb738c-5ba1-4104-8729-1a929fa6d2ba\") " 
pod="openshift-multus/multus-additional-cni-plugins-q8kww" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.370603 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-multus-socket-dir-parent\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.370642 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-host-var-lib-cni-multus\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.370783 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/72bb738c-5ba1-4104-8729-1a929fa6d2ba-os-release\") pod \"multus-additional-cni-plugins-q8kww\" (UID: \"72bb738c-5ba1-4104-8729-1a929fa6d2ba\") " pod="openshift-multus/multus-additional-cni-plugins-q8kww" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.370823 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-phk44\" (UniqueName: \"kubernetes.io/projected/72bb738c-5ba1-4104-8729-1a929fa6d2ba-kube-api-access-phk44\") pod \"multus-additional-cni-plugins-q8kww\" (UID: \"72bb738c-5ba1-4104-8729-1a929fa6d2ba\") " pod="openshift-multus/multus-additional-cni-plugins-q8kww" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.370871 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-hostroot\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.370889 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-host-var-lib-cni-bin\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.370905 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-host-var-lib-kubelet\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.370941 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-os-release\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.370978 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: 
\"kubernetes.io/host-path/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-host-run-k8s-cni-cncf-io\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.371004 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/72bb738c-5ba1-4104-8729-1a929fa6d2ba-cni-binary-copy\") pod \"multus-additional-cni-plugins-q8kww\" (UID: \"72bb738c-5ba1-4104-8729-1a929fa6d2ba\") " pod="openshift-multus/multus-additional-cni-plugins-q8kww" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.371028 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-cnibin\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.371047 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-multus-cni-dir\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.371126 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-cni-binary-copy\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.371214 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-system-cni-dir\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.371238 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-multus-conf-dir\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.371301 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/72bb738c-5ba1-4104-8729-1a929fa6d2ba-cnibin\") pod \"multus-additional-cni-plugins-q8kww\" (UID: \"72bb738c-5ba1-4104-8729-1a929fa6d2ba\") " pod="openshift-multus/multus-additional-cni-plugins-q8kww" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.371321 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-host-run-netns\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.371336 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: 
\"kubernetes.io/host-path/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-host-run-multus-certs\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.371369 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-etc-kubernetes\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.371402 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9gm6g\" (UniqueName: \"kubernetes.io/projected/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-kube-api-access-9gm6g\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.371425 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/72bb738c-5ba1-4104-8729-1a929fa6d2ba-tuning-conf-dir\") pod \"multus-additional-cni-plugins-q8kww\" (UID: \"72bb738c-5ba1-4104-8729-1a929fa6d2ba\") " pod="openshift-multus/multus-additional-cni-plugins-q8kww" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.393655 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.404921 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.422322 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90118b09-8d4e-4eb5-831f-56e13fa31009\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPat
h\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290805e286eda8aff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.446296 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7487354b977692f3cf1a8c2c9ede2bafac976404219c033a5c58f7695f039aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.462071 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.472469 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/72bb738c-5ba1-4104-8729-1a929fa6d2ba-cnibin\") pod \"multus-additional-cni-plugins-q8kww\" (UID: \"72bb738c-5ba1-4104-8729-1a929fa6d2ba\") " pod="openshift-multus/multus-additional-cni-plugins-q8kww" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.472585 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/72bb738c-5ba1-4104-8729-1a929fa6d2ba-cnibin\") pod \"multus-additional-cni-plugins-q8kww\" (UID: \"72bb738c-5ba1-4104-8729-1a929fa6d2ba\") " pod="openshift-multus/multus-additional-cni-plugins-q8kww" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.472647 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-host-run-netns\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.472663 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-host-run-multus-certs\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.472717 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-host-run-netns\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.472735 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/60b1bede-26e9-4b5d-b450-9866da685693-mcd-auth-proxy-config\") pod \"machine-config-daemon-pjb2d\" (UID: \"60b1bede-26e9-4b5d-b450-9866da685693\") " pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.472754 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-etc-kubernetes\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc 
kubenswrapper[4576]: I1203 08:40:16.472770 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9gm6g\" (UniqueName: \"kubernetes.io/projected/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-kube-api-access-9gm6g\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.472810 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-etc-kubernetes\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.472813 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-host-run-multus-certs\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.472842 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/72bb738c-5ba1-4104-8729-1a929fa6d2ba-tuning-conf-dir\") pod \"multus-additional-cni-plugins-q8kww\" (UID: \"72bb738c-5ba1-4104-8729-1a929fa6d2ba\") " pod="openshift-multus/multus-additional-cni-plugins-q8kww" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.472856 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/72bb738c-5ba1-4104-8729-1a929fa6d2ba-system-cni-dir\") pod \"multus-additional-cni-plugins-q8kww\" (UID: \"72bb738c-5ba1-4104-8729-1a929fa6d2ba\") " pod="openshift-multus/multus-additional-cni-plugins-q8kww" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.472901 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/72bb738c-5ba1-4104-8729-1a929fa6d2ba-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-q8kww\" (UID: \"72bb738c-5ba1-4104-8729-1a929fa6d2ba\") " pod="openshift-multus/multus-additional-cni-plugins-q8kww" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.472926 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-multus-socket-dir-parent\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.472942 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-multus-daemon-config\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.472963 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/a93555e6-d807-4db3-85c0-843f309e6efa-hosts-file\") pod \"node-resolver-6qf5t\" (UID: \"a93555e6-d807-4db3-85c0-843f309e6efa\") " pod="openshift-dns/node-resolver-6qf5t" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.472994 4576 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-host-var-lib-cni-multus\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.473011 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/60b1bede-26e9-4b5d-b450-9866da685693-rootfs\") pod \"machine-config-daemon-pjb2d\" (UID: \"60b1bede-26e9-4b5d-b450-9866da685693\") " pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.473036 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-phk44\" (UniqueName: \"kubernetes.io/projected/72bb738c-5ba1-4104-8729-1a929fa6d2ba-kube-api-access-phk44\") pod \"multus-additional-cni-plugins-q8kww\" (UID: \"72bb738c-5ba1-4104-8729-1a929fa6d2ba\") " pod="openshift-multus/multus-additional-cni-plugins-q8kww" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.473052 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-hostroot\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.473073 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/72bb738c-5ba1-4104-8729-1a929fa6d2ba-os-release\") pod \"multus-additional-cni-plugins-q8kww\" (UID: \"72bb738c-5ba1-4104-8729-1a929fa6d2ba\") " pod="openshift-multus/multus-additional-cni-plugins-q8kww" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.473087 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-host-var-lib-cni-bin\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.473104 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-host-var-lib-kubelet\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.473132 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mplbz\" (UniqueName: \"kubernetes.io/projected/a93555e6-d807-4db3-85c0-843f309e6efa-kube-api-access-mplbz\") pod \"node-resolver-6qf5t\" (UID: \"a93555e6-d807-4db3-85c0-843f309e6efa\") " pod="openshift-dns/node-resolver-6qf5t" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.473147 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-os-release\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.473163 4576 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-host-run-k8s-cni-cncf-io\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.473178 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nrrgd\" (UniqueName: \"kubernetes.io/projected/60b1bede-26e9-4b5d-b450-9866da685693-kube-api-access-nrrgd\") pod \"machine-config-daemon-pjb2d\" (UID: \"60b1bede-26e9-4b5d-b450-9866da685693\") " pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.473200 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/72bb738c-5ba1-4104-8729-1a929fa6d2ba-cni-binary-copy\") pod \"multus-additional-cni-plugins-q8kww\" (UID: \"72bb738c-5ba1-4104-8729-1a929fa6d2ba\") " pod="openshift-multus/multus-additional-cni-plugins-q8kww" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.473227 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-cnibin\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.473244 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-multus-cni-dir\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.473259 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-cni-binary-copy\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.473291 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/72bb738c-5ba1-4104-8729-1a929fa6d2ba-system-cni-dir\") pod \"multus-additional-cni-plugins-q8kww\" (UID: \"72bb738c-5ba1-4104-8729-1a929fa6d2ba\") " pod="openshift-multus/multus-additional-cni-plugins-q8kww" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.473357 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-host-var-lib-cni-multus\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.473414 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-host-var-lib-cni-bin\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.473440 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: 
\"kubernetes.io/host-path/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-host-run-k8s-cni-cncf-io\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.473515 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-multus-socket-dir-parent\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.473526 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-cnibin\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.473541 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-hostroot\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.473575 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-host-var-lib-kubelet\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.473569 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/72bb738c-5ba1-4104-8729-1a929fa6d2ba-tuning-conf-dir\") pod \"multus-additional-cni-plugins-q8kww\" (UID: \"72bb738c-5ba1-4104-8729-1a929fa6d2ba\") " pod="openshift-multus/multus-additional-cni-plugins-q8kww" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.473716 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-multus-cni-dir\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.473914 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/72bb738c-5ba1-4104-8729-1a929fa6d2ba-os-release\") pod \"multus-additional-cni-plugins-q8kww\" (UID: \"72bb738c-5ba1-4104-8729-1a929fa6d2ba\") " pod="openshift-multus/multus-additional-cni-plugins-q8kww" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.473915 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-os-release\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.473275 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/60b1bede-26e9-4b5d-b450-9866da685693-proxy-tls\") pod \"machine-config-daemon-pjb2d\" (UID: \"60b1bede-26e9-4b5d-b450-9866da685693\") " pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" Dec 03 
08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.474053 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-system-cni-dir\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.474072 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-multus-conf-dir\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.474140 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-system-cni-dir\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.474201 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-multus-conf-dir\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.474220 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/72bb738c-5ba1-4104-8729-1a929fa6d2ba-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-q8kww\" (UID: \"72bb738c-5ba1-4104-8729-1a929fa6d2ba\") " pod="openshift-multus/multus-additional-cni-plugins-q8kww" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.474251 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/72bb738c-5ba1-4104-8729-1a929fa6d2ba-cni-binary-copy\") pod \"multus-additional-cni-plugins-q8kww\" (UID: \"72bb738c-5ba1-4104-8729-1a929fa6d2ba\") " pod="openshift-multus/multus-additional-cni-plugins-q8kww" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.474293 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-cni-binary-copy\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.474371 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-multus-daemon-config\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.493586 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9gm6g\" (UniqueName: \"kubernetes.io/projected/e2b7eac2-6611-49d0-9da5-f97a3ccc9529-kube-api-access-9gm6g\") pod \"multus-jbxx2\" (UID: \"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\") " pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.496016 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-phk44\" (UniqueName: 
\"kubernetes.io/projected/72bb738c-5ba1-4104-8729-1a929fa6d2ba-kube-api-access-phk44\") pod \"multus-additional-cni-plugins-q8kww\" (UID: \"72bb738c-5ba1-4104-8729-1a929fa6d2ba\") " pod="openshift-multus/multus-additional-cni-plugins-q8kww" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.497276 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.510927 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.525714 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jbxx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9gm6g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jbxx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.537810 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60b1bede-26e9-4b5d-b450-9866da685693\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pjb2d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.547843 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.561246 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90118b09-8d4e-4eb5-831f-56e13fa31009\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290805e286eda8aff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.575362 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.575447 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.575473 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/a93555e6-d807-4db3-85c0-843f309e6efa-hosts-file\") pod \"node-resolver-6qf5t\" (UID: \"a93555e6-d807-4db3-85c0-843f309e6efa\") " pod="openshift-dns/node-resolver-6qf5t" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.575500 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.575573 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/60b1bede-26e9-4b5d-b450-9866da685693-rootfs\") pod \"machine-config-daemon-pjb2d\" (UID: \"60b1bede-26e9-4b5d-b450-9866da685693\") " pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.575593 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mplbz\" (UniqueName: \"kubernetes.io/projected/a93555e6-d807-4db3-85c0-843f309e6efa-kube-api-access-mplbz\") 
pod \"node-resolver-6qf5t\" (UID: \"a93555e6-d807-4db3-85c0-843f309e6efa\") " pod="openshift-dns/node-resolver-6qf5t" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.575612 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nrrgd\" (UniqueName: \"kubernetes.io/projected/60b1bede-26e9-4b5d-b450-9866da685693-kube-api-access-nrrgd\") pod \"machine-config-daemon-pjb2d\" (UID: \"60b1bede-26e9-4b5d-b450-9866da685693\") " pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.575597 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.575670 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/60b1bede-26e9-4b5d-b450-9866da685693-proxy-tls\") pod \"machine-config-daemon-pjb2d\" (UID: \"60b1bede-26e9-4b5d-b450-9866da685693\") " pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.575688 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.575707 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/60b1bede-26e9-4b5d-b450-9866da685693-mcd-auth-proxy-config\") pod \"machine-config-daemon-pjb2d\" (UID: \"60b1bede-26e9-4b5d-b450-9866da685693\") " pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.575813 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/60b1bede-26e9-4b5d-b450-9866da685693-rootfs\") pod \"machine-config-daemon-pjb2d\" (UID: \"60b1bede-26e9-4b5d-b450-9866da685693\") " pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" Dec 03 08:40:16 crc kubenswrapper[4576]: E1203 08:40:16.575925 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:40:18.575904892 +0000 UTC m=+25.961881886 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:40:16 crc kubenswrapper[4576]: E1203 08:40:16.576016 4576 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 08:40:16 crc kubenswrapper[4576]: E1203 08:40:16.576031 4576 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 08:40:16 crc kubenswrapper[4576]: E1203 08:40:16.576043 4576 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 08:40:16 crc kubenswrapper[4576]: E1203 08:40:16.576076 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 08:40:18.576068566 +0000 UTC m=+25.962045540 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.576123 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/a93555e6-d807-4db3-85c0-843f309e6efa-hosts-file\") pod \"node-resolver-6qf5t\" (UID: \"a93555e6-d807-4db3-85c0-843f309e6efa\") " pod="openshift-dns/node-resolver-6qf5t" Dec 03 08:40:16 crc kubenswrapper[4576]: E1203 08:40:16.576156 4576 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 08:40:16 crc kubenswrapper[4576]: E1203 08:40:16.576176 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 08:40:18.576169959 +0000 UTC m=+25.962146943 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.576275 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/60b1bede-26e9-4b5d-b450-9866da685693-mcd-auth-proxy-config\") pod \"machine-config-daemon-pjb2d\" (UID: \"60b1bede-26e9-4b5d-b450-9866da685693\") " pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" Dec 03 08:40:16 crc kubenswrapper[4576]: E1203 08:40:16.576594 4576 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 08:40:16 crc kubenswrapper[4576]: E1203 08:40:16.576784 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 08:40:18.576758764 +0000 UTC m=+25.962735808 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.584022 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/60b1bede-26e9-4b5d-b450-9866da685693-proxy-tls\") pod \"machine-config-daemon-pjb2d\" (UID: \"60b1bede-26e9-4b5d-b450-9866da685693\") " pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.631261 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be 
located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.642769 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-q8kww" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.643165 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mplbz\" (UniqueName: \"kubernetes.io/projected/a93555e6-d807-4db3-85c0-843f309e6efa-kube-api-access-mplbz\") pod \"node-resolver-6qf5t\" (UID: \"a93555e6-d807-4db3-85c0-843f309e6efa\") " pod="openshift-dns/node-resolver-6qf5t" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.643166 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nrrgd\" (UniqueName: \"kubernetes.io/projected/60b1bede-26e9-4b5d-b450-9866da685693-kube-api-access-nrrgd\") pod \"machine-config-daemon-pjb2d\" (UID: \"60b1bede-26e9-4b5d-b450-9866da685693\") " pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.654040 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 08:40:16 crc kubenswrapper[4576]: W1203 08:40:16.655155 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod72bb738c_5ba1_4104_8729_1a929fa6d2ba.slice/crio-ffb5baa71014198a6fd939b5f9b9d0a5cdf794cb5a00bef2f7e4c759d6774f94 WatchSource:0}: Error finding container ffb5baa71014198a6fd939b5f9b9d0a5cdf794cb5a00bef2f7e4c759d6774f94: Status 404 returned error can't find the container with id ffb5baa71014198a6fd939b5f9b9d0a5cdf794cb5a00bef2f7e4c759d6774f94 Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.666679 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-jbxx2" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.675546 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.676171 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:40:16 crc kubenswrapper[4576]: E1203 08:40:16.676336 4576 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 08:40:16 crc kubenswrapper[4576]: E1203 08:40:16.676350 4576 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 08:40:16 crc kubenswrapper[4576]: E1203 08:40:16.676390 4576 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 08:40:16 crc kubenswrapper[4576]: E1203 08:40:16.676427 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 08:40:18.676412716 +0000 UTC m=+26.062389700 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.677953 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:40:16 crc kubenswrapper[4576]: E1203 08:40:16.678085 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.678165 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:40:16 crc kubenswrapper[4576]: E1203 08:40:16.678208 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.678246 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:40:16 crc kubenswrapper[4576]: E1203 08:40:16.678292 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.681799 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-6qf5t" Dec 03 08:40:16 crc kubenswrapper[4576]: W1203 08:40:16.695526 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod60b1bede_26e9_4b5d_b450_9866da685693.slice/crio-e481d253d94a1f071e29bbbfa436eabbdd8555596287c4f3e8e237588aad38da WatchSource:0}: Error finding container e481d253d94a1f071e29bbbfa436eabbdd8555596287c4f3e8e237588aad38da: Status 404 returned error can't find the container with id e481d253d94a1f071e29bbbfa436eabbdd8555596287c4f3e8e237588aad38da Dec 03 08:40:16 crc kubenswrapper[4576]: W1203 08:40:16.700768 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode2b7eac2_6611_49d0_9da5_f97a3ccc9529.slice/crio-8a33e30bd3e7afa9d37789dc5a20d57e4c8163a892c10cdfb6c98ccef7a7308c WatchSource:0}: Error finding container 8a33e30bd3e7afa9d37789dc5a20d57e4c8163a892c10cdfb6c98ccef7a7308c: Status 404 returned error can't find the container with id 8a33e30bd3e7afa9d37789dc5a20d57e4c8163a892c10cdfb6c98ccef7a7308c Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.732204 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:16Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.742520 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-5f9zh"] Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.743423 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.746760 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.746964 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.747296 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.747449 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.748715 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.749178 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.749589 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.791680 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72bb738c-5ba1-4104-8729-1a929fa6d2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plu
gin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q8kww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-03T08:40:16Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.823496 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6qf5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a93555e6-d807-4db3-85c0-843f309e6efa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mplbz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6qf5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:16Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.855895 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7487354b977692f3cf1a8c2c9ede2bafac976404219c033a5c58f7695f039aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:16Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.878576 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-host-kubelet\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.878620 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-log-socket\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.878638 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/cfe34f07-1425-4b62-9eb0-70d1b197611c-ovnkube-config\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.878655 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: 
\"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-node-log\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.878695 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-run-systemd\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.878718 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-host-run-netns\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.878734 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-host-run-ovn-kubernetes\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.878769 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/cfe34f07-1425-4b62-9eb0-70d1b197611c-ovnkube-script-lib\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.878782 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-run-ovn\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.878797 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/cfe34f07-1425-4b62-9eb0-70d1b197611c-ovn-node-metrics-cert\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.878841 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-run-openvswitch\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.878856 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jqbqk\" (UniqueName: \"kubernetes.io/projected/cfe34f07-1425-4b62-9eb0-70d1b197611c-kube-api-access-jqbqk\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.878871 4576 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-etc-openvswitch\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.878885 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/cfe34f07-1425-4b62-9eb0-70d1b197611c-env-overrides\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.878899 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-systemd-units\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.878918 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-var-lib-openvswitch\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.878938 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.878958 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-host-slash\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.878975 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-host-cni-bin\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.879232 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-host-cni-netd\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.892122 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7487354b977692f3cf1a8c2c9ede2bafac976404219c033a5c58f7695f039aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:16Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.898070 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" event={"ID":"72bb738c-5ba1-4104-8729-1a929fa6d2ba","Type":"ContainerStarted","Data":"ffb5baa71014198a6fd939b5f9b9d0a5cdf794cb5a00bef2f7e4c759d6774f94"} Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.906880 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jbxx2" event={"ID":"e2b7eac2-6611-49d0-9da5-f97a3ccc9529","Type":"ContainerStarted","Data":"8a33e30bd3e7afa9d37789dc5a20d57e4c8163a892c10cdfb6c98ccef7a7308c"} Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.917899 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"3038dcea3c147a1d523c7cd5c6ec049a499b39dda4de06d42324e6e8998334f9"} Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.918709 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.931811 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:16Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.933486 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-6qf5t" event={"ID":"a93555e6-d807-4db3-85c0-843f309e6efa","Type":"ContainerStarted","Data":"17770704fbcc30dffaac90efa5397cf3b42c7e7bb91687e16b0acbe52f206db4"} Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.937813 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" event={"ID":"60b1bede-26e9-4b5d-b450-9866da685693","Type":"ContainerStarted","Data":"e481d253d94a1f071e29bbbfa436eabbdd8555596287c4f3e8e237588aad38da"} Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.951658 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.959452 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.971839 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"72bb738c-5ba1-4104-8729-1a929fa6d2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q8kww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:16Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.981163 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/cfe34f07-1425-4b62-9eb0-70d1b197611c-ovnkube-script-lib\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.981192 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-run-ovn\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.981226 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/cfe34f07-1425-4b62-9eb0-70d1b197611c-ovn-node-metrics-cert\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.981281 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-run-openvswitch\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.981311 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jqbqk\" (UniqueName: \"kubernetes.io/projected/cfe34f07-1425-4b62-9eb0-70d1b197611c-kube-api-access-jqbqk\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.981331 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-etc-openvswitch\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.981350 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/cfe34f07-1425-4b62-9eb0-70d1b197611c-env-overrides\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 
crc kubenswrapper[4576]: I1203 08:40:16.981382 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-systemd-units\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.981400 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-var-lib-openvswitch\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.981415 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-host-slash\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.981432 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-host-cni-bin\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.981446 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-host-cni-netd\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.981460 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.981477 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-host-kubelet\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.981492 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-log-socket\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.981510 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/cfe34f07-1425-4b62-9eb0-70d1b197611c-ovnkube-config\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.981532 4576 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-node-log\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.981585 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-run-systemd\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.981624 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-host-run-netns\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.981639 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-host-run-ovn-kubernetes\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.981703 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-host-run-ovn-kubernetes\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.981737 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-run-ovn\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.982192 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-host-cni-bin\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.982427 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-run-openvswitch\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.982658 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-run-systemd\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.982670 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: 
\"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-systemd-units\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.982702 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-var-lib-openvswitch\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.982732 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-node-log\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.982767 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-etc-openvswitch\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.983032 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-host-slash\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.983179 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-host-run-netns\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.983189 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.983212 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-host-cni-netd\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.983238 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-host-kubelet\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.983271 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-log-socket\") pod \"ovnkube-node-5f9zh\" (UID: 
\"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.984019 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/cfe34f07-1425-4b62-9eb0-70d1b197611c-ovnkube-script-lib\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.987160 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/cfe34f07-1425-4b62-9eb0-70d1b197611c-ovnkube-config\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.988258 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/cfe34f07-1425-4b62-9eb0-70d1b197611c-env-overrides\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.989480 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/cfe34f07-1425-4b62-9eb0-70d1b197611c-ovn-node-metrics-cert\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:16 crc kubenswrapper[4576]: I1203 08:40:16.991825 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6qf5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a93555e6-d807-4db3-85c0-843f309e6efa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mplbz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6qf5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:16Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:17 crc kubenswrapper[4576]: I1203 08:40:17.012071 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jqbqk\" (UniqueName: \"kubernetes.io/projected/cfe34f07-1425-4b62-9eb0-70d1b197611c-kube-api-access-jqbqk\") pod \"ovnkube-node-5f9zh\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:17 crc kubenswrapper[4576]: I1203 08:40:17.027866 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:17Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:17 crc kubenswrapper[4576]: I1203 08:40:17.049660 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:17Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:17 crc kubenswrapper[4576]: I1203 08:40:17.065395 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jbxx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9gm6g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jbxx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:17Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:17 crc kubenswrapper[4576]: I1203 08:40:17.080199 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60b1bede-26e9-4b5d-b450-9866da685693\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pjb2d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:17Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:17 crc kubenswrapper[4576]: I1203 08:40:17.116487 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe34f07-1425-4b62-9eb0-70d1b197611c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5f9zh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:17Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:17 crc kubenswrapper[4576]: I1203 08:40:17.124312 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:17 crc kubenswrapper[4576]: I1203 08:40:17.138922 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90118b09-8d4e-4eb5-831f-56e13fa31009\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCo
unt\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290805e286eda8aff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:17Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:17 crc kubenswrapper[4576]: W1203 08:40:17.141902 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcfe34f07_1425_4b62_9eb0_70d1b197611c.slice/crio-2ea6ab1b6c0d4a382c35bb286754ac5275591e818129b71b6efd6e0cc1310860 WatchSource:0}: Error finding container 2ea6ab1b6c0d4a382c35bb286754ac5275591e818129b71b6efd6e0cc1310860: Status 404 returned error can't find the container with id 2ea6ab1b6c0d4a382c35bb286754ac5275591e818129b71b6efd6e0cc1310860 Dec 03 08:40:17 crc kubenswrapper[4576]: I1203 08:40:17.155323 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:17Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:17 crc kubenswrapper[4576]: I1203 08:40:17.176908 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:17Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:17 crc kubenswrapper[4576]: I1203 08:40:17.198770 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:17Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:17 crc kubenswrapper[4576]: I1203 08:40:17.224003 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:17Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:17 crc kubenswrapper[4576]: I1203 08:40:17.247909 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:17Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:17 crc kubenswrapper[4576]: I1203 08:40:17.263140 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jbxx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9gm6g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jbxx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:17Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:17 crc kubenswrapper[4576]: I1203 08:40:17.280188 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60b1bede-26e9-4b5d-b450-9866da685693\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pjb2d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:17Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:17 crc kubenswrapper[4576]: I1203 08:40:17.354102 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe34f07-1425-4b62-9eb0-70d1b197611c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5f9zh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:17Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:17 crc kubenswrapper[4576]: I1203 08:40:17.380942 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9942901-4be1-41db-b32c-9b996b72901f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88d9b58084ebfe06075e53fe77aec63e6c4e338aec6e1f31998d4bfcaca7ba77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac6d69cbfb86f4b717a66b46125d5d530049473a1f8b21e3be0d85c37ba5b837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T
08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3dc0695bda2cc75c2ccf82050ca47b54244a19319c741a2368e448f635ee3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fdc3c90dc45378285ef9d57dceea95152497f0aefe12991a47354d11a0c838b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b49f6c1bad412759cfa18b56387ca168d26a69640f1c15c1b82040b73531bf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\
",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:17Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:17 crc kubenswrapper[4576]: I1203 08:40:17.411477 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90118b09-8d4e-4eb5-831f-56e13fa31009\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290805e286eda8aff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:17Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:17 crc kubenswrapper[4576]: I1203 08:40:17.430322 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3038dcea3c147a1d523c7cd5c6ec049a499b39dda4de06d42324e6e8998334f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52333a05f039ec84c87b0170524958d01fbb7688666f98dd54700a6398ce3683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:17Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:17 crc kubenswrapper[4576]: I1203 08:40:17.450492 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:17Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:17 crc kubenswrapper[4576]: I1203 08:40:17.498927 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:17Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:17 crc kubenswrapper[4576]: I1203 08:40:17.527101 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7487354b977692f3cf1a8c2c9ede2bafac976404219c033a5c58f7695f039aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:17Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:17 crc kubenswrapper[4576]: I1203 08:40:17.550759 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:17Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:17 crc kubenswrapper[4576]: I1203 08:40:17.585953 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72bb738c-5ba1-4104-8729-1a929fa6d2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q8kww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:17Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:17 crc kubenswrapper[4576]: I1203 08:40:17.625532 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6qf5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a93555e6-d807-4db3-85c0-843f309e6efa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mplbz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6qf5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:17Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:17 crc kubenswrapper[4576]: I1203 08:40:17.941286 4576 generic.go:334] "Generic (PLEG): container finished" podID="72bb738c-5ba1-4104-8729-1a929fa6d2ba" containerID="d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c" exitCode=0 Dec 03 08:40:17 crc kubenswrapper[4576]: I1203 08:40:17.941367 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" event={"ID":"72bb738c-5ba1-4104-8729-1a929fa6d2ba","Type":"ContainerDied","Data":"d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c"} Dec 03 08:40:17 crc kubenswrapper[4576]: I1203 08:40:17.944378 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jbxx2" event={"ID":"e2b7eac2-6611-49d0-9da5-f97a3ccc9529","Type":"ContainerStarted","Data":"87ab08c02e31fb934248fab8c55b94006e54110d6176cd571dbefd774980d9aa"} Dec 03 08:40:17 crc kubenswrapper[4576]: I1203 08:40:17.945790 4576 generic.go:334] "Generic (PLEG): container finished" podID="cfe34f07-1425-4b62-9eb0-70d1b197611c" 
containerID="cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8" exitCode=0 Dec 03 08:40:17 crc kubenswrapper[4576]: I1203 08:40:17.945853 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" event={"ID":"cfe34f07-1425-4b62-9eb0-70d1b197611c","Type":"ContainerDied","Data":"cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8"} Dec 03 08:40:17 crc kubenswrapper[4576]: I1203 08:40:17.945881 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" event={"ID":"cfe34f07-1425-4b62-9eb0-70d1b197611c","Type":"ContainerStarted","Data":"2ea6ab1b6c0d4a382c35bb286754ac5275591e818129b71b6efd6e0cc1310860"} Dec 03 08:40:17 crc kubenswrapper[4576]: I1203 08:40:17.947097 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-6qf5t" event={"ID":"a93555e6-d807-4db3-85c0-843f309e6efa","Type":"ContainerStarted","Data":"69aea80b1478f26af6f4835e1599afdb41d2a619ca44bfbdd35b1cdf664a7a2f"} Dec 03 08:40:17 crc kubenswrapper[4576]: I1203 08:40:17.948704 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" event={"ID":"60b1bede-26e9-4b5d-b450-9866da685693","Type":"ContainerStarted","Data":"3c6e7072fbdbb1574e02549ccd40206d728b36405b4c49b87e7f9fe11a8e0a38"} Dec 03 08:40:17 crc kubenswrapper[4576]: I1203 08:40:17.948732 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" event={"ID":"60b1bede-26e9-4b5d-b450-9866da685693","Type":"ContainerStarted","Data":"77fcbab4f8441fdeef4ff6f84326c1c5ab799faac0698120b8af9da36a524290"} Dec 03 08:40:17 crc kubenswrapper[4576]: I1203 08:40:17.963675 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7487354b977692f3cf1a8c2c9ede2bafac976404219c033a5c58f7695f039aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:17Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:17 crc kubenswrapper[4576]: I1203 08:40:17.976915 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:17Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:17 crc kubenswrapper[4576]: I1203 08:40:17.992588 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72bb738c-5ba1-4104-8729-1a929fa6d2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q8kww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:17Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:18 crc 
kubenswrapper[4576]: I1203 08:40:18.005159 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6qf5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a93555e6-d807-4db3-85c0-843f309e6efa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mplbz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6qf5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:18Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:18 crc kubenswrapper[4576]: I1203 08:40:18.032509 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:18Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:18 crc kubenswrapper[4576]: I1203 08:40:18.051507 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:18Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:18 crc kubenswrapper[4576]: I1203 08:40:18.075165 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jbxx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9gm6g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jbxx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:18Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:18 crc kubenswrapper[4576]: I1203 08:40:18.096666 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60b1bede-26e9-4b5d-b450-9866da685693\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pjb2d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:18Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:18 crc kubenswrapper[4576]: I1203 08:40:18.129364 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe34f07-1425-4b62-9eb0-70d1b197611c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5f9zh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:18Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:18 crc kubenswrapper[4576]: I1203 08:40:18.166659 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9942901-4be1-41db-b32c-9b996b72901f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88d9b58084ebfe06075e53fe77aec63e6c4e338aec6e1f31998d4bfcaca7ba77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac6d69cbfb86f4b717a66b46125d5d530049473a1f8b21e3be0d85c37ba5b837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T
08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3dc0695bda2cc75c2ccf82050ca47b54244a19319c741a2368e448f635ee3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fdc3c90dc45378285ef9d57dceea95152497f0aefe12991a47354d11a0c838b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b49f6c1bad412759cfa18b56387ca168d26a69640f1c15c1b82040b73531bf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\
",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:18Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:18 crc kubenswrapper[4576]: I1203 08:40:18.193080 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90118b09-8d4e-4eb5-831f-56e13fa31009\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290805e286eda8aff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:18Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:18 crc kubenswrapper[4576]: I1203 08:40:18.222906 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3038dcea3c147a1d523c7cd5c6ec049a499b39dda4de06d42324e6e8998334f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52333a05f039ec84c87b0170524958d01fbb7688666f98dd54700a6398ce3683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:18Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:18 crc kubenswrapper[4576]: I1203 08:40:18.251582 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:18Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:18 crc kubenswrapper[4576]: I1203 08:40:18.416955 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:18Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:18 crc kubenswrapper[4576]: I1203 08:40:18.444387 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:18Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:18 crc kubenswrapper[4576]: I1203 08:40:18.461604 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:18Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:18 crc kubenswrapper[4576]: I1203 08:40:18.481544 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72bb738c-5ba1-4104-8729-1a929fa6d2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"
tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"
readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q8kww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:18Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:18 crc kubenswrapper[4576]: I1203 08:40:18.502734 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6qf5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a93555e6-d807-4db3-85c0-843f309e6efa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69aea80b1478f26af6f4835e1599afdb41d2a619ca44bfbdd35b1cdf664a7a2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mplbz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6qf5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:18Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:18 crc kubenswrapper[4576]: I1203 08:40:18.521573 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7487354b977692f3cf1a8c2c9ede2bafac976404219c033a5c58f7695f039aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:18Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:18 crc kubenswrapper[4576]: I1203 08:40:18.533936 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:18Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:18 crc kubenswrapper[4576]: I1203 08:40:18.551760 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jbxx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87ab08c02e31fb934248fab8c55b94006e54110d6176cd571dbefd774980d9aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\
\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9gm6g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jbxx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:18Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:18 crc kubenswrapper[4576]: I1203 08:40:18.572934 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60b1bede-26e9-4b5d-b450-9866da685693\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c6e7072fbdbb1574e02549ccd40206d728b36405b4c49b87e7f9fe11a8e0a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\
"cri-o://77fcbab4f8441fdeef4ff6f84326c1c5ab799faac0698120b8af9da36a524290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pjb2d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:18Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:18 crc kubenswrapper[4576]: I1203 08:40:18.616812 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe34f07-1425-4b62-9eb0-70d1b197611c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5f9zh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:18Z 
is after 2025-08-24T17:21:41Z" Dec 03 08:40:18 crc kubenswrapper[4576]: I1203 08:40:18.653287 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:18Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:18 crc kubenswrapper[4576]: I1203 08:40:18.699184 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:40:18 crc kubenswrapper[4576]: I1203 08:40:18.699251 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:40:18 crc kubenswrapper[4576]: E1203 08:40:18.699318 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:40:18 crc kubenswrapper[4576]: I1203 08:40:18.699376 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:40:18 crc kubenswrapper[4576]: I1203 08:40:18.699415 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:40:18 crc kubenswrapper[4576]: I1203 08:40:18.699446 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:40:18 crc kubenswrapper[4576]: I1203 08:40:18.699484 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:40:18 crc kubenswrapper[4576]: E1203 08:40:18.699646 4576 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 08:40:18 crc kubenswrapper[4576]: E1203 08:40:18.699703 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 08:40:22.699684513 +0000 UTC m=+30.085661497 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 08:40:18 crc kubenswrapper[4576]: I1203 08:40:18.699746 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:40:18 crc kubenswrapper[4576]: E1203 08:40:18.699778 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:40:22.699769406 +0000 UTC m=+30.085746390 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:40:18 crc kubenswrapper[4576]: E1203 08:40:18.699811 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:40:18 crc kubenswrapper[4576]: E1203 08:40:18.699864 4576 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 08:40:18 crc kubenswrapper[4576]: E1203 08:40:18.699882 4576 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 08:40:18 crc kubenswrapper[4576]: E1203 08:40:18.699894 4576 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 08:40:18 crc kubenswrapper[4576]: E1203 08:40:18.699930 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 08:40:22.69992127 +0000 UTC m=+30.085898254 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 08:40:18 crc kubenswrapper[4576]: E1203 08:40:18.699976 4576 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 08:40:18 crc kubenswrapper[4576]: E1203 08:40:18.699985 4576 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 08:40:18 crc kubenswrapper[4576]: I1203 08:40:18.699868 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:40:18 crc kubenswrapper[4576]: E1203 08:40:18.700020 4576 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 08:40:18 crc kubenswrapper[4576]: E1203 08:40:18.700101 4576 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 08:40:18 crc kubenswrapper[4576]: E1203 08:40:18.700006 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 08:40:22.699996511 +0000 UTC m=+30.085973505 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 08:40:18 crc kubenswrapper[4576]: E1203 08:40:18.700130 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:40:18 crc kubenswrapper[4576]: E1203 08:40:18.700147 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 08:40:22.700133045 +0000 UTC m=+30.086110029 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 08:40:18 crc kubenswrapper[4576]: I1203 08:40:18.749414 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:18Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:18 crc kubenswrapper[4576]: I1203 08:40:18.786932 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90118b09-8d4e-4eb5-831f-56e13fa31009\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"
running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290805e286eda8aff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:18Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:18 crc kubenswrapper[4576]: I1203 08:40:18.808480 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3038dcea3c147a1d523c7cd5c6ec049a499b39dda4de06d42324e6e8998334f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52333a05f039ec84c87b0170524958d01fbb7688666f98dd54700a6398ce3683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:18Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:18 crc kubenswrapper[4576]: I1203 08:40:18.974500 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5f9zh_cfe34f07-1425-4b62-9eb0-70d1b197611c/ovn-acl-logging/0.log" Dec 03 08:40:18 crc kubenswrapper[4576]: I1203 08:40:18.974975 4576 generic.go:334] "Generic (PLEG): container finished" podID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerID="106766323b05884834bffc21cd45b77320e1bf62862139be3e9cfed8b004275e" 
exitCode=1 Dec 03 08:40:18 crc kubenswrapper[4576]: I1203 08:40:18.975027 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" event={"ID":"cfe34f07-1425-4b62-9eb0-70d1b197611c","Type":"ContainerStarted","Data":"62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c"} Dec 03 08:40:18 crc kubenswrapper[4576]: I1203 08:40:18.975051 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" event={"ID":"cfe34f07-1425-4b62-9eb0-70d1b197611c","Type":"ContainerStarted","Data":"5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4"} Dec 03 08:40:18 crc kubenswrapper[4576]: I1203 08:40:18.975060 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" event={"ID":"cfe34f07-1425-4b62-9eb0-70d1b197611c","Type":"ContainerStarted","Data":"6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97"} Dec 03 08:40:18 crc kubenswrapper[4576]: I1203 08:40:18.975070 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" event={"ID":"cfe34f07-1425-4b62-9eb0-70d1b197611c","Type":"ContainerDied","Data":"106766323b05884834bffc21cd45b77320e1bf62862139be3e9cfed8b004275e"} Dec 03 08:40:18 crc kubenswrapper[4576]: I1203 08:40:18.975082 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" event={"ID":"cfe34f07-1425-4b62-9eb0-70d1b197611c","Type":"ContainerStarted","Data":"dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f"} Dec 03 08:40:18 crc kubenswrapper[4576]: I1203 08:40:18.976677 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" event={"ID":"72bb738c-5ba1-4104-8729-1a929fa6d2ba","Type":"ContainerStarted","Data":"876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797"} Dec 03 08:40:19 crc kubenswrapper[4576]: I1203 08:40:19.074027 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9942901-4be1-41db-b32c-9b996b72901f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88d9b58084ebfe06075e53fe77aec63e6c4e338aec6e1f31998d4bfcaca7ba77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac6d69cbfb86f4b717a66b46125d5d530049473a1f8b21e3be0d85c37ba5b837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3dc0695bda2cc75c2ccf82050ca47b54244a19319c741a2368e448f635ee3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fdc3c90dc45378285ef9d57dceea95152497f0
aefe12991a47354d11a0c838b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b49f6c1bad412759cfa18b56387ca168d26a69640f1c15c1b82040b73531bf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:18Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:19 crc kubenswrapper[4576]: I1203 08:40:19.115350 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7487354b977692f3cf1a8c2c9ede2bafac976404219c033a5c58f7695f039aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:19Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:19 crc kubenswrapper[4576]: I1203 08:40:19.148997 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:19Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:19 crc kubenswrapper[4576]: I1203 08:40:19.167654 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72bb738c-5ba1-4104-8729-1a929fa6d2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins 
bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"}
,{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cni
bin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q8kww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:19Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:19 crc kubenswrapper[4576]: I1203 08:40:19.182184 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6qf5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a93555e6-d807-4db3-85c0-843f309e6efa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69aea80b1478f26af6f4835e1599afdb41d2a619ca44bfbdd35b1cdf664a7a2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mplbz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6qf5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:19Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:19 crc kubenswrapper[4576]: I1203 08:40:19.203480 4576 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:19Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:19 crc kubenswrapper[4576]: I1203 08:40:19.222130 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:19Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:19 crc kubenswrapper[4576]: I1203 08:40:19.242398 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jbxx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87ab08c02e31fb934248fab8c55b94006e54110d6176cd571dbefd774980d9aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9gm6g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jbxx2\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:19Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:19 crc kubenswrapper[4576]: I1203 08:40:19.257026 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60b1bede-26e9-4b5d-b450-9866da685693\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c6e7072fbdbb1574e02549ccd40206d728b36405b4c49b87e7f9fe11a8e0a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77fcbab4f8441fdeef4ff6f84326c1c5ab799faac0698120b8af9da36a524290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-pjb2d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:19Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:19 crc kubenswrapper[4576]: I1203 08:40:19.307026 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe34f07-1425-4b62-9eb0-70d1b197611c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5f9zh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:19Z 
is after 2025-08-24T17:21:41Z" Dec 03 08:40:19 crc kubenswrapper[4576]: I1203 08:40:19.343717 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9942901-4be1-41db-b32c-9b996b72901f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88d9b58084ebfe06075e53fe77aec63e6c4e338aec6e1f31998d4bfcaca7ba77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac6d69cbfb86f4b717a66b46125d5d530049473a1f8b21e3be0d85c37ba5b837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3dc0695bda2cc75c2ccf82050ca47b54244a19319c741a2368e448f635ee3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fdc3c90dc45378285ef9d57dceea95152497f0aefe12991a47354d11a0c838b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b49f6c1bad412759cfa18b56387ca168d26a69640f1c15c1b82040b73531bf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\"
:\\\"2025-12-03T08:39:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:19Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:19 crc kubenswrapper[4576]: I1203 08:40:19.362087 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90118b09-8d4e-4eb5-831f-56e13fa31009\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290805e286eda8aff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:19Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:19 crc kubenswrapper[4576]: I1203 08:40:19.382637 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3038dcea3c147a1d523c7cd5c6ec049a499b39dda4de06d42324e6e8998334f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52333a05f039ec84c87b0170524958d01fbb7688666f98dd54700a6398ce3683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:19Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:19 crc kubenswrapper[4576]: I1203 08:40:19.411031 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:19Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:19 crc kubenswrapper[4576]: I1203 08:40:19.434093 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:19Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:19 crc kubenswrapper[4576]: I1203 08:40:19.994044 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"ff480bdd6b31fa53fe906aaa2459ccce17b507c4789f4ab6b64a8d2f286c9b39"} Dec 03 08:40:19 crc kubenswrapper[4576]: I1203 08:40:19.996944 4576 generic.go:334] "Generic (PLEG): container finished" podID="72bb738c-5ba1-4104-8729-1a929fa6d2ba" containerID="876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797" exitCode=0 Dec 03 08:40:19 crc kubenswrapper[4576]: I1203 08:40:19.997007 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" event={"ID":"72bb738c-5ba1-4104-8729-1a929fa6d2ba","Type":"ContainerDied","Data":"876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797"} Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.002088 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5f9zh_cfe34f07-1425-4b62-9eb0-70d1b197611c/ovn-acl-logging/0.log" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.002703 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" event={"ID":"cfe34f07-1425-4b62-9eb0-70d1b197611c","Type":"ContainerStarted","Data":"eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8"} Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.018821 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff480bdd6b31fa53fe906aaa2459ccce17b507c4789f4ab6b64a8d2f286c9b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.035667 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jbxx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87ab08c02e31fb934248fab8c55b94006e54110d6176cd571dbefd774980d9aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9gm6g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jbxx2\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.053991 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60b1bede-26e9-4b5d-b450-9866da685693\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c6e7072fbdbb1574e02549ccd40206d728b36405b4c49b87e7f9fe11a8e0a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77fcbab4f8441fdeef4ff6f84326c1c5ab799faac0698120b8af9da36a524290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-pjb2d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.072918 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe34f07-1425-4b62-9eb0-70d1b197611c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5f9zh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z 
is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.085182 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.105347 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9942901-4be1-41db-b32c-9b996b72901f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88d9b58084ebfe06075e53fe77aec63e6c4e338aec6e1f31998d4bfcaca7ba77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac6d69cbfb86f4b717a66b46125d5d530049473a1f8b21e3be0d85c37ba5b837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3dc0695bda2cc75c2ccf82050ca47b54244a19319c741a2368e448f635ee3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fdc3c90dc45378285ef9d57dceea95152497f0
aefe12991a47354d11a0c838b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b49f6c1bad412759cfa18b56387ca168d26a69640f1c15c1b82040b73531bf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.147377 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90118b09-8d4e-4eb5-831f-56e13fa31009\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290805e286eda8aff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.159688 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3038dcea3c147a1d523c7cd5c6ec049a499b39dda4de06d42324e6e8998334f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52333a05f039ec84c87b0170524958d01fbb7688666f98dd54700a6398ce3683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.171103 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.187640 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.201780 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.222956 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72bb738c-5ba1-4104-8729-1a929fa6d2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64
b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q8kww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.243663 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6qf5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a93555e6-d807-4db3-85c0-843f309e6efa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69aea80b1478f26af6f4835e1599afdb41d2a619ca44bfbdd35b1cdf664a7a2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mplbz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6qf5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.259982 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7487354b977692f3cf1a8c2c9ede2bafac976404219c033a5c58f7695f039aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.279049 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9942901-4be1-41db-b32c-9b996b72901f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88d9b58084ebfe06075e53fe77aec63e6c4e338aec6e1f31998d4bfcaca7ba77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac6d69cbfb86f4b717a66b46125d5d530049473a1f8b21e3be0d85c37ba5b837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3dc0695bda2cc75c2ccf82050ca47b54244a19319c741a2368e448f635ee3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fdc3c90dc45378285ef9d57dceea95152497f0
aefe12991a47354d11a0c838b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b49f6c1bad412759cfa18b56387ca168d26a69640f1c15c1b82040b73531bf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.293322 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90118b09-8d4e-4eb5-831f-56e13fa31009\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290805e286eda8aff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.313561 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3038dcea3c147a1d523c7cd5c6ec049a499b39dda4de06d42324e6e8998334f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52333a05f039ec84c87b0170524958d01fbb7688666f98dd54700a6398ce3683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.324615 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-xrjlb"] Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.325961 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-xrjlb" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.328280 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.328283 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.329033 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.333610 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/et
c/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints 
registered and discovery information is complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.337993 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.346206 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.363978 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7487354b977692f3cf1a8c2c9ede2bafac976404219c033a5c58f7695f039aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.379922 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.390337 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab-serviceca\") pod \"node-ca-xrjlb\" (UID: \"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab\") " pod="openshift-image-registry/node-ca-xrjlb" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.390390 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wlwv6\" (UniqueName: \"kubernetes.io/projected/0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab-kube-api-access-wlwv6\") pod \"node-ca-xrjlb\" (UID: \"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab\") " pod="openshift-image-registry/node-ca-xrjlb" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.390427 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab-host\") pod \"node-ca-xrjlb\" (UID: \"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab\") " pod="openshift-image-registry/node-ca-xrjlb" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.399559 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72bb738c-5ba1-4104-8729-1a929fa6d2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready 
status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"
name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"
2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q8kww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.415864 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6qf5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a93555e6-d807-4db3-85c0-843f309e6efa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69aea80b1478f26af6f4835e1599afdb41d2a619ca44bfbdd35b1cdf664a7a2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mplbz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6qf5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.432117 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.478517 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff480bdd6b31fa53fe906aaa2459ccce17b507c4789f4ab6b64a8d2f286c9b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.491166 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wlwv6\" (UniqueName: \"kubernetes.io/projected/0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab-kube-api-access-wlwv6\") pod \"node-ca-xrjlb\" (UID: \"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab\") " pod="openshift-image-registry/node-ca-xrjlb" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.491213 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab-host\") pod \"node-ca-xrjlb\" (UID: \"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab\") " pod="openshift-image-registry/node-ca-xrjlb" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.491255 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab-serviceca\") pod \"node-ca-xrjlb\" (UID: \"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab\") " pod="openshift-image-registry/node-ca-xrjlb" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.491413 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab-host\") pod \"node-ca-xrjlb\" (UID: \"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab\") " pod="openshift-image-registry/node-ca-xrjlb" Dec 03 08:40:20 
crc kubenswrapper[4576]: I1203 08:40:20.492143 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab-serviceca\") pod \"node-ca-xrjlb\" (UID: \"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab\") " pod="openshift-image-registry/node-ca-xrjlb" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.510760 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jbxx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87ab08c02e31fb934248fab8c55b94006e54110d6176cd571dbefd774980d9aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/s
ecrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9gm6g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jbxx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.522640 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60b1bede-26e9-4b5d-b450-9866da685693\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c6e7072fbdbb1574e02549ccd40206d728b36405b4c49b87e7f9fe11a8e0a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77fcbab4f8441fdeef4ff6f84326c1c5ab799faac0698120b8af9da36a524290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPat
h\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pjb2d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.523267 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wlwv6\" (UniqueName: \"kubernetes.io/projected/0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab-kube-api-access-wlwv6\") pod \"node-ca-xrjlb\" (UID: \"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab\") " pod="openshift-image-registry/node-ca-xrjlb" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.540897 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe34f07-1425-4b62-9eb0-70d1b197611c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5f9zh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z 
is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.552111 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff480bdd6b31fa53fe906aaa2459ccce17b507c4789f4ab6b64a8d2f286c9b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.565714 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jbxx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87ab08c02e31fb934248fab8c55b94006e54110d6176cd571dbefd774980d9aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9gm6g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jbxx2\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.575345 4576 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.577277 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.577323 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.577334 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.577521 4576 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.580122 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60b1bede-26e9-4b5d-b450-9866da685693\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c6e7072fbdbb1574e02549ccd40206d728b36405b4c49b87e7f9fe11a8e0a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77fcbab4f8441fdeef4ff6f84326c1c5ab799faac0698120b8af9da36a524290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915f
b8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pjb2d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.585599 4576 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.585908 4576 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.587026 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.587063 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.587072 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.587086 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.587109 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:20Z","lastTransitionTime":"2025-12-03T08:40:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.597799 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe34f07-1425-4b62-9eb0-70d1b197611c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341
cdba2c857433f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5f9zh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: E1203 08:40:20.602025 4576 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148060Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608860Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"86dcb870-937a-4231-a14c-053b8b425329\\\",\\\"systemUUID\\\":\\\"da5fb8e3-5a19-4f2d-831e-00b6f563dbea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.605059 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.605097 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.605110 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.605127 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.605138 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:20Z","lastTransitionTime":"2025-12-03T08:40:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.609265 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xrjlb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlwv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xrjlb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 
2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: E1203 08:40:20.619483 4576 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148060Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608860Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"86dcb870-937a-4231-a14c-053b8b425329\\\",\\\"systemUUID\\\":\\\"da5fb8e3-5a19-4f2d-831e-00b6f563dbea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.622030 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.623519 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.623581 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.623595 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.623613 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.623625 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:20Z","lastTransitionTime":"2025-12-03T08:40:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:20 crc kubenswrapper[4576]: E1203 08:40:20.636167 4576 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148060Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608860Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"86dcb870-937a-4231-a14c-053b8b425329\\\",\\\"systemUUID\\\":\\\"da5fb8e3-5a19-4f2d-831e-00b6f563dbea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.639773 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-xrjlb" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.640349 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.640407 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.640421 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.640445 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.640457 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:20Z","lastTransitionTime":"2025-12-03T08:40:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.642850 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9942901-4be1-41db-b32c-9b996b72901f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88d9b58084ebfe06075e53fe77aec63e6c4e338aec6e1f31998d4bfcaca7ba77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac6d69cbfb86f4b717a66b46125d5d530049473a1f8b21e3be0d85c37ba5b837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshi
ft-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3dc0695bda2cc75c2ccf82050ca47b54244a19319c741a2368e448f635ee3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fdc3c90dc45378285ef9d57dceea95152497f0aefe12991a47354d11a0c838b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b49f6c1bad412759cfa18b56387ca168d26a69640f1c15c1b82040b73531bf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb6
8e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: E1203 08:40:20.660884 4576 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148060Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608860Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae66
9\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-rel
ease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"86dcb870-937a-4231-a14c-053b8b425329\\\",\\\"systemUUID\\\":\\\"da5fb8e3-5a19-4f2d-831e-00b6f563dbea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.661041 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90118b09-8d4e-4eb5-831f-56e13fa31009\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89
c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290805e286eda8aff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.663994 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.664103 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.664171 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.664245 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 
08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.664334 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:20Z","lastTransitionTime":"2025-12-03T08:40:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.673435 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3038dcea3c147a1d523c7cd5c6ec049a499b39dda4de06d42324e6e8998334f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52333a05f039ec84c87b0170524958d01fbb7688666f98dd54700a6398ce3683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.676100 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:40:20 crc kubenswrapper[4576]: E1203 08:40:20.676399 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.676700 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:40:20 crc kubenswrapper[4576]: E1203 08:40:20.676749 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.676787 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:40:20 crc kubenswrapper[4576]: E1203 08:40:20.676826 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:40:20 crc kubenswrapper[4576]: E1203 08:40:20.677002 4576 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148060Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608860Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"86dcb870-937a-4231-a14c-053b8b425329\\\",\\\"systemUUID\\\":\\\"da5fb8e3-5a19-4f2d-831e-00b6f563dbea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: E1203 08:40:20.677103 4576 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.678538 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.678566 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.678575 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.678589 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.678599 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:20Z","lastTransitionTime":"2025-12-03T08:40:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.689546 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.709179 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.721607 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.736349 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72bb738c-5ba1-4104-8729-1a929fa6d2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-
03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q8kww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.745896 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6qf5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a93555e6-d807-4db3-85c0-843f309e6efa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69aea80b1478f26af6f4835e1599afdb41d2a619ca44bfbdd35b1cdf664a7a2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mplbz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6qf5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.756262 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7487354b977692f3cf1a8c2c9ede2bafac976404219c033a5c58f7695f039aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:20Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.780999 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.781036 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.781048 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.781065 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.781077 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:20Z","lastTransitionTime":"2025-12-03T08:40:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.883643 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.883697 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.883707 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.883722 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.883732 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:20Z","lastTransitionTime":"2025-12-03T08:40:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.986219 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.986257 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.986266 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.986279 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:20 crc kubenswrapper[4576]: I1203 08:40:20.986288 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:20Z","lastTransitionTime":"2025-12-03T08:40:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.013902 4576 generic.go:334] "Generic (PLEG): container finished" podID="72bb738c-5ba1-4104-8729-1a929fa6d2ba" containerID="718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da" exitCode=0 Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.013977 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" event={"ID":"72bb738c-5ba1-4104-8729-1a929fa6d2ba","Type":"ContainerDied","Data":"718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da"} Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.016297 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-xrjlb" event={"ID":"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab","Type":"ContainerStarted","Data":"cc47ce4e85ba7755ceabf5b864579aee5ea67ffcc13719ac1068f6c46a5a7d3c"} Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.016375 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-xrjlb" event={"ID":"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab","Type":"ContainerStarted","Data":"515861121cc2c41285b74d692b7839f0a1fce008f2b10434ac25c9ce09495a77"} Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.041028 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9942901-4be1-41db-b32c-9b996b72901f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88d9b58084ebfe06075e53fe77aec63e6c4e338aec6e1f31998d4bfcaca7ba77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac6d69cbfb86f4b717a66b46125d5d530049473a1f8b21e3be0d85c37ba5b837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-re
lease-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3dc0695bda2cc75c2ccf82050ca47b54244a19319c741a2368e448f635ee3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fdc3c90dc45378285ef9d57dceea95152497f0aefe12991a47354d11a0c838b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b49f6c1bad412759cfa18b56387ca168d26a69640f1c15c1b82040b73531bf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e
9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:21Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.054690 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90118b09-8d4e-4eb5-831f-56e13fa31009\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290805e286eda8aff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:21Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.065970 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3038dcea3c147a1d523c7cd5c6ec049a499b39dda4de06d42324e6e8998334f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52333a05f039ec84c87b0170524958d01fbb7688666f98dd54700a6398ce3683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:21Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.079725 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:21Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.089355 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.089412 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.089424 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.089441 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.089453 4576 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:21Z","lastTransitionTime":"2025-12-03T08:40:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.090315 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:21Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.105429 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7487354b977692f3cf1a8c2c9ede2bafac976404219c033a5c58f7695f039aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:21Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.117652 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:21Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.130606 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72bb738c-5ba1-4104-8729-1a929fa6d2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",
\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q8kww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:21Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.142332 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6qf5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a93555e6-d807-4db3-85c0-843f309e6efa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69aea80b1478f26af6f4835e1599afdb41d2a619ca44bfbdd35b1cdf664a7a2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mplbz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6qf5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:21Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.155163 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:21Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.172024 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff480bdd6b31fa53fe906aaa2459ccce17b507c4789f4ab6b64a8d2f286c9b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:21Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.189002 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jbxx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87ab08c02e31fb934248fab8c55b94006e54110d6176cd571dbefd774980d9aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9gm6g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jbxx2\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:21Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.191485 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.191512 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.191521 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.191548 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.191557 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:21Z","lastTransitionTime":"2025-12-03T08:40:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.212614 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60b1bede-26e9-4b5d-b450-9866da685693\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c6e7072fbdbb1574e02549ccd40206d728b36405b4c49b87e7f9fe11a8e0a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77fcbab4f8441fdeef4ff6f84326c1c5ab799faac0698120b8af9da36a524290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pjb2d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:21Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.232436 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe34f07-1425-4b62-9eb0-70d1b197611c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5f9zh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:21Z 
is after 2025-08-24T17:21:41Z" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.243511 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xrjlb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlwv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xrjlb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:21Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.262361 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:21Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.275224 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:21Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.293386 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.293430 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.293443 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.293460 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.293472 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:21Z","lastTransitionTime":"2025-12-03T08:40:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.298524 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7487354b977692f3cf1a8c2c9ede2bafac976404219c033a5c58f7695f039aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:21Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.313720 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:21Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.329185 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72bb738c-5ba1-4104-8729-1a929fa6d2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q8kww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:21Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.339704 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6qf5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a93555e6-d807-4db3-85c0-843f309e6efa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69aea80b1478f26af6f4835e1599afdb41d2a619ca44bfbdd35b1cdf664a7a2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mplbz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6qf5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:21Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 
08:40:21.352838 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:21Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.368981 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff480bdd6b31fa53fe906aaa2459ccce17b507c4789f4ab6b64a8d2f286c9b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:21Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.384074 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jbxx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87ab08c02e31fb934248fab8c55b94006e54110d6176cd571dbefd774980d9aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9gm6g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jbxx2\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:21Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.395847 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.395883 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.395891 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.395907 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.395916 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:21Z","lastTransitionTime":"2025-12-03T08:40:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.404641 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60b1bede-26e9-4b5d-b450-9866da685693\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c6e7072fbdbb1574e02549ccd40206d728b36405b4c49b87e7f9fe11a8e0a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77fcbab4f8441fdeef4ff6f84326c1c5ab799faac0698120b8af9da36a524290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pjb2d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:21Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.427329 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe34f07-1425-4b62-9eb0-70d1b197611c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5f9zh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:21Z 
is after 2025-08-24T17:21:41Z" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.441880 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xrjlb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc47ce4e85ba7755ceabf5b864579aee5ea67ffcc13719ac1068f6c46a5a7d3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlwv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xrjlb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:21Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.463277 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9942901-4be1-41db-b32c-9b996b72901f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88d9b58084ebfe06075e53fe77aec63e6c4e338aec6e1f31998d4bfcaca7ba77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac6d69cbfb86f4b717a66b46125d5d530049473a1f8b21e3be0d85c37ba5b837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3dc0695bda2cc75c2ccf82050ca47b54244a19319c741a2368e448f635ee3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fdc3c90dc45378285ef9d57dceea95152497f0
aefe12991a47354d11a0c838b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b49f6c1bad412759cfa18b56387ca168d26a69640f1c15c1b82040b73531bf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:21Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.484025 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90118b09-8d4e-4eb5-831f-56e13fa31009\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290805e286eda8aff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:21Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.496707 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3038dcea3c147a1d523c7cd5c6ec049a499b39dda4de06d42324e6e8998334f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52333a05f039ec84c87b0170524958d01fbb7688666f98dd54700a6398ce3683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:21Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.498543 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.498577 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.498588 4576 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.498602 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.498613 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:21Z","lastTransitionTime":"2025-12-03T08:40:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.600686 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.600740 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.600752 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.600772 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.600785 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:21Z","lastTransitionTime":"2025-12-03T08:40:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.703963 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.704042 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.704070 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.704102 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.704175 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:21Z","lastTransitionTime":"2025-12-03T08:40:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.806686 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.806730 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.806740 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.806755 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.806764 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:21Z","lastTransitionTime":"2025-12-03T08:40:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.908610 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.908664 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.908678 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.908696 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:21 crc kubenswrapper[4576]: I1203 08:40:21.908708 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:21Z","lastTransitionTime":"2025-12-03T08:40:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.010307 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.010341 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.010349 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.010362 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.010371 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:22Z","lastTransitionTime":"2025-12-03T08:40:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.024249 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5f9zh_cfe34f07-1425-4b62-9eb0-70d1b197611c/ovn-acl-logging/0.log" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.025593 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" event={"ID":"cfe34f07-1425-4b62-9eb0-70d1b197611c","Type":"ContainerStarted","Data":"5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0"} Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.027864 4576 generic.go:334] "Generic (PLEG): container finished" podID="72bb738c-5ba1-4104-8729-1a929fa6d2ba" containerID="29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b" exitCode=0 Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.027895 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" event={"ID":"72bb738c-5ba1-4104-8729-1a929fa6d2ba","Type":"ContainerDied","Data":"29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b"} Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.043888 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:22Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.058991 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:22Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.070052 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6qf5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a93555e6-d807-4db3-85c0-843f309e6efa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69aea80b1478f26af6f4835e1599afdb41d2a619ca44bfbdd35b1cdf664a7a2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mplbz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6qf5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:22Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.083873 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7487354b977692f3cf1a8c2c9ede2bafac976404219c033a5c58f7695f039aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:22Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.095607 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:22Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.110317 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72bb738c-5ba1-4104-8729-1a929fa6d2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q8kww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:22Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.112588 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.112629 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.112640 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.112657 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.112669 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:22Z","lastTransitionTime":"2025-12-03T08:40:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.122012 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60b1bede-26e9-4b5d-b450-9866da685693\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c6e7072fbdbb1574e02549ccd40206d728b36405b4c49b87e7f9fe11a8e0a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77fcbab4f8441fdeef4ff6f84326c1c5ab799faac0698120b8af9da36a524290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pjb2d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:22Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.141592 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe34f07-1425-4b62-9eb0-70d1b197611c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af
0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"n
ame\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIP
s\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5f9zh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:22Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.150746 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xrjlb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc47ce4e85ba7755ceabf5b864579aee5ea67ffcc13719ac1068f6c46a5a7d3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlwv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xrjlb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:22Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.163830 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:22Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.177027 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff480bdd6b31fa53fe906aaa2459ccce17b507c4789f4ab6b64a8d2f286c9b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:22Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.189290 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jbxx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87ab08c02e31fb934248fab8c55b94006e54110d6176cd571dbefd774980d9aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/ser
viceaccount\\\",\\\"name\\\":\\\"kube-api-access-9gm6g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jbxx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:22Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.199988 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3038dcea3c147a1d523c7cd5c6ec049a499b39dda4de06d42324e6e8998334f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52333a05f039ec84c87b0170524958d01fbb7688666f98dd54700a6398ce3683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"
kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:22Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.215186 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.215220 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.215233 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.215250 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.215263 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:22Z","lastTransitionTime":"2025-12-03T08:40:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.220400 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9942901-4be1-41db-b32c-9b996b72901f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88d9b58084ebfe06075e53fe77aec63e6c4e338aec6e1f31998d4bfcaca7ba77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac6d69cbfb86f4b717a66b46125d5d530049473a1f8b21e3be0d85c37ba5b837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3dc0695bda2cc75c2ccf82050ca47b54244a19319c741a2368e448f635ee3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fdc3c90dc45378285ef9d57dceea95152497f0
aefe12991a47354d11a0c838b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b49f6c1bad412759cfa18b56387ca168d26a69640f1c15c1b82040b73531bf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:22Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.232578 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90118b09-8d4e-4eb5-831f-56e13fa31009\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290805e286eda8aff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:22Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.317809 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.317848 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.317859 4576 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.317874 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.317885 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:22Z","lastTransitionTime":"2025-12-03T08:40:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.420394 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.420425 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.420434 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.420454 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.420463 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:22Z","lastTransitionTime":"2025-12-03T08:40:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.523740 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.523767 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.523776 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.523790 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.523800 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:22Z","lastTransitionTime":"2025-12-03T08:40:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.627576 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.627638 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.627659 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.627684 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.627702 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:22Z","lastTransitionTime":"2025-12-03T08:40:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.677190 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.677311 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.677199 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:40:22 crc kubenswrapper[4576]: E1203 08:40:22.677478 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:40:22 crc kubenswrapper[4576]: E1203 08:40:22.677332 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:40:22 crc kubenswrapper[4576]: E1203 08:40:22.677665 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.707389 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.707598 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:40:22 crc kubenswrapper[4576]: E1203 08:40:22.707703 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:40:30.707669566 +0000 UTC m=+38.093646600 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.707800 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:40:22 crc kubenswrapper[4576]: E1203 08:40:22.707845 4576 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 08:40:22 crc kubenswrapper[4576]: E1203 08:40:22.707886 4576 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 08:40:22 crc kubenswrapper[4576]: E1203 08:40:22.707902 4576 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 08:40:22 crc kubenswrapper[4576]: E1203 08:40:22.707965 4576 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.707861 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: 
\"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:40:22 crc kubenswrapper[4576]: E1203 08:40:22.707976 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 08:40:30.707954913 +0000 UTC m=+38.093931907 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 08:40:22 crc kubenswrapper[4576]: E1203 08:40:22.708084 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 08:40:30.708058986 +0000 UTC m=+38.094036000 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 08:40:22 crc kubenswrapper[4576]: E1203 08:40:22.708126 4576 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 08:40:22 crc kubenswrapper[4576]: E1203 08:40:22.708166 4576 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 08:40:22 crc kubenswrapper[4576]: E1203 08:40:22.708184 4576 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 08:40:22 crc kubenswrapper[4576]: E1203 08:40:22.708254 4576 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.708130 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:40:22 crc kubenswrapper[4576]: E1203 08:40:22.708298 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 
nodeName:}" failed. No retries permitted until 2025-12-03 08:40:30.708275071 +0000 UTC m=+38.094252075 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 08:40:22 crc kubenswrapper[4576]: E1203 08:40:22.708398 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 08:40:30.708369634 +0000 UTC m=+38.094346648 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.729894 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.729933 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.729943 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.729956 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.729966 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:22Z","lastTransitionTime":"2025-12-03T08:40:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.832466 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.832569 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.832595 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.832626 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.832647 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:22Z","lastTransitionTime":"2025-12-03T08:40:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.935422 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.935464 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.935476 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.935491 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:22 crc kubenswrapper[4576]: I1203 08:40:22.935502 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:22Z","lastTransitionTime":"2025-12-03T08:40:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.038173 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.038217 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.038228 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.038245 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.038260 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:23Z","lastTransitionTime":"2025-12-03T08:40:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.116729 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" event={"ID":"72bb738c-5ba1-4104-8729-1a929fa6d2ba","Type":"ContainerStarted","Data":"af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c"} Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.145662 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9942901-4be1-41db-b32c-9b996b72901f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88d9b58084ebfe06075e53fe77aec63e6c4e338aec6e1f31998d4bfcaca7ba77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac6d69cbfb86f4b717a66b46125d5d530049473a1f8b21e3be0d85c37ba5b837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3dc0695bda2cc75c2ccf82050ca47b54244a19319c741a2368e448f635ee3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/o
cp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fdc3c90dc45378285ef9d57dceea95152497f0aefe12991a47354d11a0c838b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b49f6c1bad412759cfa18b56387ca168d26a69640f1c15c1b82040b73531bf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b
90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:23Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.155618 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90118b09-8d4e-4eb5-831f-56e13fa31009\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290805e286eda8aff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:23Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.160346 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.160387 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.160398 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.160416 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.160426 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:23Z","lastTransitionTime":"2025-12-03T08:40:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.165669 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3038dcea3c147a1d523c7cd5c6ec049a499b39dda4de06d42324e6e8998334f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52333a05f039ec84c87b0170524958d01fbb7688666f98dd54700a6398ce3683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:23Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.178853 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:23Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.189426 4576 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.191253 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Patch \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-network-diagnostics/pods/network-check-target-xd92c/status\": read tcp 38.129.56.136:33768->38.129.56.136:6443: use of closed network connection" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.232364 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7487354b977692f3cf1a8c2c9ede2bafac976404219c033a5c58f7695f039aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:23Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.244363 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:23Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.256724 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72bb738c-5ba1-4104-8729-1a929fa6d2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q8kww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:23Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.262587 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.262647 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.262665 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.262686 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.262724 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:23Z","lastTransitionTime":"2025-12-03T08:40:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.267222 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6qf5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a93555e6-d807-4db3-85c0-843f309e6efa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69aea80b1478f26af6f4835e1599afdb41d2a619ca44bfbdd35b1cdf664a7a2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mplbz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6qf5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:23Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.289410 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe34f07-1425-4b62-9eb0-70d1b197611c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5f9zh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:23Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.304329 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xrjlb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc47ce4e85ba7755ceabf5b864579aee5ea67ffcc13719ac1068f6c46a5a7d3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlwv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\
\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xrjlb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:23Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.318952 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:23Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.330922 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff480bdd6b31fa53fe906aaa2459ccce17b507c4789f4ab6b64a8d2f286c9b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:23Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.343180 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jbxx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87ab08c02e31fb934248fab8c55b94006e54110d6176cd571dbefd774980d9aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9gm6g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jbxx2\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:23Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.352861 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60b1bede-26e9-4b5d-b450-9866da685693\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c6e7072fbdbb1574e02549ccd40206d728b36405b4c49b87e7f9fe11a8e0a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77fcbab4f8441fdeef4ff6f84326c1c5ab799faac0698120b8af9da36a524290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-pjb2d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:23Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.364795 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.364856 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.364869 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.364883 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.364893 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:23Z","lastTransitionTime":"2025-12-03T08:40:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.467470 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.467508 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.467523 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.467554 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.467566 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:23Z","lastTransitionTime":"2025-12-03T08:40:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.570691 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.570720 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.570728 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.570744 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.570756 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:23Z","lastTransitionTime":"2025-12-03T08:40:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.674645 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.674696 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.674711 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.674732 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.674745 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:23Z","lastTransitionTime":"2025-12-03T08:40:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.700053 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9942901-4be1-41db-b32c-9b996b72901f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88d9b58084ebfe06075e53fe77aec63e6c4e338aec6e1f31998d4bfcaca7ba77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac6d69cbfb86f4b717a66b46125d5d530049473a1f8b21e3be0d85c37ba5b837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3dc0695bda2cc75c2ccf82050ca47b54244a19319c741a2368e448f635ee3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fdc3c90dc45378285ef9d57dceea95152497f0aefe12991a47354d11a0c838b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b49f6c1bad412759cfa18b56387ca168d26a69640f1c15c1b82040b73531bf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:23Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.712431 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90118b09-8d4e-4eb5-831f-56e13fa31009\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290805e286eda8aff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:23Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.726061 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3038dcea3c147a1d523c7cd5c6ec049a499b39dda4de06d42324e6e8998334f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52333a05f039ec84c87b0170524958d01fbb7688666f98dd54700a6398ce3683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:23Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.747214 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:23Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.764564 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:23Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.776660 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7487354b977692f3cf1a8c2c9ede2bafac976404219c033a5c58f7695f039aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:23Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.776943 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.776986 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.777000 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.777018 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.777030 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:23Z","lastTransitionTime":"2025-12-03T08:40:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.789611 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:23Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.805092 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72bb738c-5ba1-4104-8729-1a929fa6d2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with 
unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\
"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2025-12-03T08:40:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q8kww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:23Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.820199 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6qf5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a93555e6-d807-4db3-85c0-843f309e6efa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69aea80b1478f26af6f4835e1599afdb41d2a619ca44bfbdd35b1cdf664a7a2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mplbz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6qf5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:23Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.838360 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe34f07-1425-4b62-9eb0-70d1b197611c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5f9zh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:23Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.847285 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xrjlb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc47ce4e85ba7755ceabf5b864579aee5ea67ffcc13719ac1068f6c46a5a7d3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlwv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xrjlb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:23Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.857376 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:23Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.868789 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff480bdd6b31fa53fe906aaa2459ccce17b507c4789f4ab6b64a8d2f286c9b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:23Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.879297 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.879336 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.879351 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.879370 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.879389 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:23Z","lastTransitionTime":"2025-12-03T08:40:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.883976 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jbxx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87ab08c02e31fb934248fab8c55b94006e54110d6176cd571dbefd774980d9aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9gm6g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jbxx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:23Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.894720 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60b1bede-26e9-4b5d-b450-9866da685693\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c6e7072fbdbb1574e02549ccd40206d728b36405b4c49b87e7f9fe11a8e0a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77fcbab4f8441fdeef4ff6f84326c1c5ab799faac0698120b8af9da36a524290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pjb2d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:23Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.982637 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.982700 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.982715 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.982735 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:23 crc kubenswrapper[4576]: I1203 08:40:23.982747 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:23Z","lastTransitionTime":"2025-12-03T08:40:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.085687 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.085734 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.085752 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.085774 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.085790 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:24Z","lastTransitionTime":"2025-12-03T08:40:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.126857 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5f9zh_cfe34f07-1425-4b62-9eb0-70d1b197611c/ovn-acl-logging/0.log" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.127839 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" event={"ID":"cfe34f07-1425-4b62-9eb0-70d1b197611c","Type":"ContainerStarted","Data":"5649690ef9d0bed5577a03544e6450ad8893cdf676934590c2a0d8a423cae285"} Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.128166 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.128483 4576 scope.go:117] "RemoveContainer" containerID="106766323b05884834bffc21cd45b77320e1bf62862139be3e9cfed8b004275e" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.176166 4576 generic.go:334] "Generic (PLEG): container finished" podID="72bb738c-5ba1-4104-8729-1a929fa6d2ba" containerID="af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c" exitCode=0 Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.176217 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" event={"ID":"72bb738c-5ba1-4104-8729-1a929fa6d2ba","Type":"ContainerDied","Data":"af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c"} Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.183085 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90118b09-8d4e-4eb5-831f-56e13fa31009\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb\\\",\\\
"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290805e286eda8aff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.188924 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.188959 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.188970 4576 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.188988 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.189000 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:24Z","lastTransitionTime":"2025-12-03T08:40:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.200172 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3038dcea3c147a1d523c7cd5c6ec049a499b39dda4de06d42324e6e8998334f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52333a05f039ec84c87b0170524958d01fbb7688666f98dd54700a6398ce3683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acc
ess-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.228796 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9942901-4be1-41db-b32c-9b996b72901f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88d9b58084ebfe06075e53fe77aec63e6c4e338aec6e1f31998d4bfcaca7ba77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac6d69cbfb86f4b717a66b46125d5d530049473a1f8b21e3be0d85c37ba5b837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3dc0695bda2cc75c2ccf82050ca47b54244a19319c741a2368e448f635ee3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa37
23269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fdc3c90dc45378285ef9d57dceea95152497f0aefe12991a47354d11a0c838b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b49f6c1bad412759cfa18b56387ca168d26a69640f1c15c1b82040b73531bf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731c
a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.243356 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.255837 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.259280 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.270274 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"72bb738c-5ba1-4104-8729-1a929fa6d2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/
ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\"
:\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q8kww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.285218 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6qf5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a93555e6-d807-4db3-85c0-843f309e6efa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69aea80b1478f26af6f4835e1599afdb41d2a619ca44bfbdd35b1cdf664a7a2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mplbz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6qf5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.291346 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.291391 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.291404 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.291424 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.291436 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:24Z","lastTransitionTime":"2025-12-03T08:40:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.299481 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7487354b977692f3cf1a8c2c9ede2bafac976404219c033a5c58f7695f039aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.312550 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.323847 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jbxx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87ab08c02e31fb934248fab8c55b94006e54110d6176cd571dbefd774980d9aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cn
i/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9gm6g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jbxx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.335809 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60b1bede-26e9-4b5d-b450-9866da685693\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c6e7072fbdbb1574e02549ccd40206d728b36405b4c49b87e7f9fe11a8e0a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77fcbab4f8441fdeef4ff6f84326c1c5ab799faac0698120b8af9da36a524290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pjb2d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.353664 4576 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe34f07-1425-4b62-9eb0-70d1b197611c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-acl-logging nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-acl-logging nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8\\\",\\\"
image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://106766323b05884834bffc21cd45b77320e1bf62862139be3e9cfed8b004275e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://106766323b05884834bffc21cd45b77320e1bf62862139be3e9cfed8b004275e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:18Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname 
/var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5649690ef9d0bed5577a03544e6450ad8893cdf676934590c2a0d8a423cae285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":tru
e,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acce
ss-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5f9zh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.365030 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xrjlb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc47ce4e85ba7755ceabf5b864579aee5ea67ffcc13719ac1068f6c46a5a7d3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlwv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xrjlb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.377813 4576 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.390087 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff480bdd6b31fa53fe906aaa2459ccce17b507c4789f4ab6b64a8d2f286c9b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.396872 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.397010 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.397021 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.397033 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.397054 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:24Z","lastTransitionTime":"2025-12-03T08:40:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.409324 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72bb738c-5ba1-4104-8729-1a929fa6d2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk
44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"reason\\\
":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q8kww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.421691 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6qf5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a93555e6-d807-4db3-85c0-843f309e6efa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69aea80b1478f26af6f4835e1599afdb41d2a619ca44bfbdd35b1cdf664a7a2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mplbz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6qf5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.440208 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7487354b977692f3cf1a8c2c9ede2bafac976404219c033a5c58f7695f039aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.456483 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.475195 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jbxx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87ab08c02e31fb934248fab8c55b94006e54110d6176cd571dbefd774980d9aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\
\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9gm6g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jbxx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.488493 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60b1bede-26e9-4b5d-b450-9866da685693\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c6e7072fbdbb1574e02549ccd40206d728b36405b4c49b87e7f9fe11a8e0a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\
"cri-o://77fcbab4f8441fdeef4ff6f84326c1c5ab799faac0698120b8af9da36a524290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pjb2d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.508577 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.508606 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.508615 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.508629 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.508638 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:24Z","lastTransitionTime":"2025-12-03T08:40:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.515171 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe34f07-1425-4b62-9eb0-70d1b197611c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-acl-logging nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-acl-logging nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\
\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://106766323b05884834bffc21cd45b77320e1bf62862139be3e9cfed8b004275e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://106766323b05884834bffc21cd45b77320e1bf62862139be3e9cfed8b004275e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:18Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ 
sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5649690ef9d0bed5577a03544e6450ad8893cdf676934590c2a0d8a423cae285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\"
:\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\
\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5f9zh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.528761 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xrjlb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc47ce4e85ba7755ceabf5b864579aee5ea67ffcc13719ac1068f6c46a5a7d3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlwv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xrjlb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.541975 
4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.556159 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff480bdd6b31fa53fe906aaa2459ccce17b507c4789f4ab6b64a8d2f286c9b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.570889 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90118b09-8d4e-4eb5-831f-56e13fa31009\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290805e286eda8aff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.582352 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3038dcea3c147a1d523c7cd5c6ec049a499b39dda4de06d42324e6e8998334f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52333a05f039ec84c87b0170524958d01fbb7688666f98dd54700a6398ce3683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.600174 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9942901-4be1-41db-b32c-9b996b72901f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88d9b58084ebfe06075e53fe77aec63e6c4e338aec6e1f31998d4bfcaca7ba77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac6d69cbfb86f4b717a66b46125d5d530049473a1f8b21e3be0d85c37ba5b837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\
\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3dc0695bda2cc75c2ccf82050ca47b54244a19319c741a2368e448f635ee3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fdc3c90dc45378285ef9d57dceea95152497f0aefe12991a47354d11a0c838b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b49f6c1bad412759cfa18b56387ca168d26a69640f1c15c1b82040b73531bf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\
"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.611795 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.611828 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.611836 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.611850 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.611862 4576 
setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:24Z","lastTransitionTime":"2025-12-03T08:40:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.613079 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.627425 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.676950 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.676950 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.676967 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:40:24 crc kubenswrapper[4576]: E1203 08:40:24.677308 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:40:24 crc kubenswrapper[4576]: E1203 08:40:24.677204 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:40:24 crc kubenswrapper[4576]: E1203 08:40:24.677101 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.715111 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.715157 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.715169 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.715187 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.715200 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:24Z","lastTransitionTime":"2025-12-03T08:40:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.817987 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.818023 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.818035 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.818052 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.818063 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:24Z","lastTransitionTime":"2025-12-03T08:40:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.921200 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.921267 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.921282 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.921298 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:24 crc kubenswrapper[4576]: I1203 08:40:24.921308 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:24Z","lastTransitionTime":"2025-12-03T08:40:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.022879 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.022914 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.022924 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.022942 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.022954 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:25Z","lastTransitionTime":"2025-12-03T08:40:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.125372 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.125443 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.125462 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.125491 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.125514 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:25Z","lastTransitionTime":"2025-12-03T08:40:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.184421 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5f9zh_cfe34f07-1425-4b62-9eb0-70d1b197611c/ovn-acl-logging/0.log" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.186276 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" event={"ID":"cfe34f07-1425-4b62-9eb0-70d1b197611c","Type":"ContainerStarted","Data":"eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704"} Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.186482 4576 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.186732 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.190408 4576 generic.go:334] "Generic (PLEG): container finished" podID="72bb738c-5ba1-4104-8729-1a929fa6d2ba" containerID="f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3" exitCode=0 Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.190712 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" event={"ID":"72bb738c-5ba1-4104-8729-1a929fa6d2ba","Type":"ContainerDied","Data":"f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3"} Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.277310 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.277357 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.277368 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.277384 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.277394 4576 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:25Z","lastTransitionTime":"2025-12-03T08:40:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.278985 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9942901-4be1-41db-b32c-9b996b72901f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88d9b58084ebfe06075e53fe77aec63e6c4e338aec6e1f31998d4bfcaca7ba77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac6d69cbfb86f4b717a66b46125d5d530049473a1f8b21e3be0d85c37ba5b837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3dc0695bda2cc75c2ccf82050ca47b54244a19319c741a2368e448f635ee3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fdc3c90dc45378285ef9d57dceea95152497f0aefe12991a47354d11a0c838b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b49f6c1bad412759cfa18b56387ca168d26a69640f1c15c1b82040b73531bf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd
6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.280835 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.292770 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90118b09-8d4e-4eb5-831f-56e13fa31009\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290805e286eda8aff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.306326 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3038dcea3c147a1d523c7cd5c6ec049a499b39dda4de06d42324e6e8998334f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52333a05f039ec84c87b0170524958d01fbb7688666f98dd54700a6398ce3683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.319413 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.330674 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.342713 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7487354b977692f3cf1a8c2c9ede2bafac976404219c033a5c58f7695f039aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.352826 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.364371 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72bb738c-5ba1-4104-8729-1a929fa6d2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q8kww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.375074 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6qf5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a93555e6-d807-4db3-85c0-843f309e6efa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69aea80b1478f26af6f4835e1599afdb41d2a619ca44bfbdd35b1cdf664a7a2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mplbz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6qf5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.379467 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.379496 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.379506 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.379519 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.379551 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:25Z","lastTransitionTime":"2025-12-03T08:40:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.386519 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.400052 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff480bdd6b31fa53fe906aaa2459ccce17b507c4789f4ab6b64a8d2f286c9b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.412462 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jbxx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87ab08c02e31fb934248fab8c55b94006e54110d6176cd571dbefd774980d9aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9gm6g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jbxx2\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.426555 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60b1bede-26e9-4b5d-b450-9866da685693\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c6e7072fbdbb1574e02549ccd40206d728b36405b4c49b87e7f9fe11a8e0a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77fcbab4f8441fdeef4ff6f84326c1c5ab799faac0698120b8af9da36a524290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-pjb2d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.447246 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe34f07-1425-4b62-9eb0-70d1b197611c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"
mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://106766323b05884834bffc21cd45b77320e1bf62862139be3e9cfed8b004275e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:18Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ 
nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5649690ef9d0bed5577a03544e6450ad8893cdf676934590c2a0d8a423cae285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":
\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\
\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5f9zh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.457229 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xrjlb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc47ce4e85ba7755ceabf5b864579aee5ea67ffcc13719ac1068f6c46a5a7d3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlwv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xrjlb\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.468252 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7487354b977692f3cf1a8c2c9ede2bafac976404219c033a5c58f7695f039aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.480725 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.482968 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.483003 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.483012 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.483026 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.483036 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:25Z","lastTransitionTime":"2025-12-03T08:40:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.499794 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72bb738c-5ba1-4104-8729-1a929fa6d2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476
d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\
\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q8kww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.510322 4576 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-dns/node-resolver-6qf5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a93555e6-d807-4db3-85c0-843f309e6efa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69aea80b1478f26af6f4835e1599afdb41d2a619ca44bfbdd35b1cdf664a7a2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mplbz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6qf5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.523690 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.535040 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff480bdd6b31fa53fe906aaa2459ccce17b507c4789f4ab6b64a8d2f286c9b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.548796 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jbxx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87ab08c02e31fb934248fab8c55b94006e54110d6176cd571dbefd774980d9aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/ser
viceaccount\\\",\\\"name\\\":\\\"kube-api-access-9gm6g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jbxx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.566046 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60b1bede-26e9-4b5d-b450-9866da685693\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c6e7072fbdbb1574e02549ccd40206d728b36405b4c49b87e7f9fe11a8e0a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77fcbab4f8441fdeef4ff6f84326c1c5ab799faac0698120b8af9da36a524290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\
\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pjb2d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.586073 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.586122 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.586137 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.586155 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.586175 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:25Z","lastTransitionTime":"2025-12-03T08:40:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.588611 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe34f07-1425-4b62-9eb0-70d1b197611c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://106766323b05884834bffc21cd45b77320e1bf62862139be3e9cfed8b004275e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:18Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ 
local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5649690ef9d0bed5577a03544e6450ad8893cdf676934590c2a0d8a423cae285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-
run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-ope
nvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5f9zh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.598379 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xrjlb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc47ce4e85ba7755ceabf5b864579aee5ea67ffcc13719ac1068f6c46a5a7d3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlwv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xrjlb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.618310 4576 
status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9942901-4be1-41db-b32c-9b996b72901f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88d9b58084ebfe06075e53fe77aec63e6c4e338aec6e1f31998d4bfcaca7ba77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac6d69cbfb86f4b717a66b46125d5d530049473a1f8b21e3be0d85c37ba5b837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3dc0695bda2cc75c2ccf82050ca47b54244a19319c741a2368e448f635ee3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-
certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fdc3c90dc45378285ef9d57dceea95152497f0aefe12991a47354d11a0c838b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b49f6c1bad412759cfa18b56387ca168d26a69640f1c15c1b82040b73531bf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:
39:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.632517 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90118b09-8d4e-4eb5-831f-56e13fa31009\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290805e286eda8aff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.644621 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3038dcea3c147a1d523c7cd5c6ec049a499b39dda4de06d42324e6e8998334f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52333a05f039ec84c87b0170524958d01fbb7688666f98dd54700a6398ce3683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.656250 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.688758 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.688968 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.688983 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.688992 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.689004 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.689013 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:25Z","lastTransitionTime":"2025-12-03T08:40:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.797926 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.798020 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.798044 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.798072 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.798094 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:25Z","lastTransitionTime":"2025-12-03T08:40:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.901911 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.902385 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.902627 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.902829 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.903089 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:25Z","lastTransitionTime":"2025-12-03T08:40:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:25 crc kubenswrapper[4576]: I1203 08:40:25.977748 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.006888 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.007176 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.007399 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.007647 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.007810 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:26Z","lastTransitionTime":"2025-12-03T08:40:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.110831 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.110876 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.110890 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.110908 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.110922 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:26Z","lastTransitionTime":"2025-12-03T08:40:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.117925 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9942901-4be1-41db-b32c-9b996b72901f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88d9b58084ebfe06075e53fe77aec63e6c4e338aec6e1f31998d4bfcaca7ba77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac6d69cbfb86f4b717a66b46125d5d530049473a1f8b21e3be0d85c37ba5b837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3dc0695bda2cc75c2ccf82050ca47b54244a19319c741a2368e448f635ee3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fdc3c90dc45378285ef9d57dceea95152497f0aefe12991a47354d11a0c838b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b49f6c1bad412759cfa18b56387ca168d26a69640f1c15c1b82040b73531bf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:26Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.135469 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90118b09-8d4e-4eb5-831f-56e13fa31009\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290805e286eda8aff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:26Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.150384 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3038dcea3c147a1d523c7cd5c6ec049a499b39dda4de06d42324e6e8998334f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52333a05f039ec84c87b0170524958d01fbb7688666f98dd54700a6398ce3683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:26Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.169505 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 
08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:26Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.191430 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:26Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.206494 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7487354b977692f3cf1a8c2c9ede2bafac976404219c033a5c58f7695f039aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:26Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.223090 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:26Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.234800 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72bb738c-5ba1-4104-8729-1a929fa6d2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q8kww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:26Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.242259 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6qf5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a93555e6-d807-4db3-85c0-843f309e6efa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69aea80b1478f26af6f4835e1599afdb41d2a619ca44bfbdd35b1cdf664a7a2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mplbz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6qf5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:26Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.252438 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:26Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.255381 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.255416 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.255436 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.255457 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.255518 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:26Z","lastTransitionTime":"2025-12-03T08:40:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.273988 4576 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.275018 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" event={"ID":"72bb738c-5ba1-4104-8729-1a929fa6d2ba","Type":"ContainerStarted","Data":"d06ad4110fcb33e7b3cd2221f15d0b056660f9111f5ac3fd5f2633d4269ee553"} Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.275468 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff480bdd6b31fa53fe906aaa2459ccce17b507c4789f4ab6b64a8d2f286c9b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:26Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.286435 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jbxx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87ab08c02e31fb934248fab8c55b94006e54110d6176cd571dbefd774980d9aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9gm6g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jbxx2\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:26Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.299905 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60b1bede-26e9-4b5d-b450-9866da685693\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c6e7072fbdbb1574e02549ccd40206d728b36405b4c49b87e7f9fe11a8e0a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77fcbab4f8441fdeef4ff6f84326c1c5ab799faac0698120b8af9da36a524290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-pjb2d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:26Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.360686 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe34f07-1425-4b62-9eb0-70d1b197611c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\
\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://106766323b05884834bffc21cd45b77320e1bf62862139be3e9cfed8b004275e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:18Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ 
nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5649690ef9d0bed5577a03544e6450ad8893cdf676934590c2a0d8a423cae285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":
\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\
\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5f9zh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:26Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.361772 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.361798 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.361807 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.361819 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.361829 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:26Z","lastTransitionTime":"2025-12-03T08:40:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.369973 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xrjlb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc47ce4e85ba7755ceabf5b864579aee5ea67ffcc13719ac1068f6c46a5a7d3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlwv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xrjlb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:26Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.381973 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90118b09-8d4e-4eb5-831f-56e13fa31009\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290805e286eda8aff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:26Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.393509 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3038dcea3c147a1d523c7cd5c6ec049a499b39dda4de06d42324e6e8998334f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52333a05f039ec84c87b0170524958d01fbb7688666f98dd54700a6398ce3683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:26Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.414476 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9942901-4be1-41db-b32c-9b996b72901f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88d9b58084ebfe06075e53fe77aec63e6c4e338aec6e1f31998d4bfcaca7ba77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac6d69cbfb86f4b717a66b46125d5d530049473a1f8b21e3be0d85c37ba5b837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\
\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3dc0695bda2cc75c2ccf82050ca47b54244a19319c741a2368e448f635ee3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fdc3c90dc45378285ef9d57dceea95152497f0aefe12991a47354d11a0c838b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b49f6c1bad412759cfa18b56387ca168d26a69640f1c15c1b82040b73531bf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\
"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:26Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.443726 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:26Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.463797 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.463825 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.463833 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.463846 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.463856 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:26Z","lastTransitionTime":"2025-12-03T08:40:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.501744 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:26Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.566247 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.566283 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.566295 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.566311 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.566322 4576 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:26Z","lastTransitionTime":"2025-12-03T08:40:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.584227 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72bb738c-5ba1-4104-8729-1a929fa6d2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06ad4110fcb33e7b3cd2221f15d0b056660f9111f5ac3fd5f2633d4269ee553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",
\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"co
ntainerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q8kww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:26Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.637834 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6qf5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a93555e6-d807-4db3-85c0-843f309e6efa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69aea80b1478f26af6f4835e1599afdb41d2a619ca44bfbdd35b1cdf664a7a2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mplbz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6qf5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:26Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.654342 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7487354b977692f3cf1a8c2c9ede2bafac976404219c033a5c58f7695f039aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:26Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.668707 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:26Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.669152 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.669174 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.669182 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.669194 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.669204 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:26Z","lastTransitionTime":"2025-12-03T08:40:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.676733 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:40:26 crc kubenswrapper[4576]: E1203 08:40:26.676832 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.677064 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:40:26 crc kubenswrapper[4576]: E1203 08:40:26.677119 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.677157 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:40:26 crc kubenswrapper[4576]: E1203 08:40:26.677195 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.683124 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jbxx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87ab08c02e31fb934248fab8c55b94006e54110d6176cd571dbefd774980d9aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"
hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9gm6g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jbxx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:26Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.722454 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60b1bede-26e9-4b5d-b450-9866da685693\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c6e7072fbdbb1574e02549ccd40206d728b36405b4c49b87e7f9fe11a8e0a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77fcbab4f8441fdeef4ff6f84326c1c5ab799faac0698120b8af9da36a524290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-
dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pjb2d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:26Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.771580 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.771617 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.771628 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.771644 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.771655 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:26Z","lastTransitionTime":"2025-12-03T08:40:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.781680 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe34f07-1425-4b62-9eb0-70d1b197611c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://106766323b05884834bffc21cd45b77320e1bf62862139be3e9cfed8b004275e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:18Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ 
local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5649690ef9d0bed5577a03544e6450ad8893cdf676934590c2a0d8a423cae285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-
run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-ope
nvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5f9zh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:26Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.806047 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xrjlb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc47ce4e85ba7755ceabf5b864579aee5ea67ffcc13719ac1068f6c46a5a7d3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlwv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xrjlb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:26Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.851676 4576 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:26Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.874347 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.874415 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.874429 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.874452 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.874466 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:26Z","lastTransitionTime":"2025-12-03T08:40:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns 
error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.887802 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff480bdd6b31fa53fe906aaa2459ccce17b507c4789f4ab6b64a8d2f286c9b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:26Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.977299 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.977337 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.977348 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.977363 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:26 crc kubenswrapper[4576]: I1203 08:40:26.977373 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:26Z","lastTransitionTime":"2025-12-03T08:40:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.079830 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.079861 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.079869 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.079883 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.079892 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:27Z","lastTransitionTime":"2025-12-03T08:40:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.182470 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.182505 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.182513 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.182547 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.182568 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:27Z","lastTransitionTime":"2025-12-03T08:40:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.276369 4576 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.284406 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.284463 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.284476 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.284493 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.284504 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:27Z","lastTransitionTime":"2025-12-03T08:40:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.386690 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.386730 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.386740 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.386754 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.386764 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:27Z","lastTransitionTime":"2025-12-03T08:40:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.489573 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.489613 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.489622 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.489636 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.489646 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:27Z","lastTransitionTime":"2025-12-03T08:40:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.595682 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.595761 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.595787 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.595817 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.595850 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:27Z","lastTransitionTime":"2025-12-03T08:40:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.698964 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.699023 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.699047 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.699075 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.699097 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:27Z","lastTransitionTime":"2025-12-03T08:40:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.801695 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.801740 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.801752 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.801767 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.801778 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:27Z","lastTransitionTime":"2025-12-03T08:40:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.905011 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.905045 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.905056 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.905071 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:27 crc kubenswrapper[4576]: I1203 08:40:27.905081 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:27Z","lastTransitionTime":"2025-12-03T08:40:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.008276 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.008348 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.008374 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.008402 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.008427 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:28Z","lastTransitionTime":"2025-12-03T08:40:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.110583 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.110624 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.110639 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.110661 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.110675 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:28Z","lastTransitionTime":"2025-12-03T08:40:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.212724 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.212780 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.212795 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.212813 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.212827 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:28Z","lastTransitionTime":"2025-12-03T08:40:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.282282 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5f9zh_cfe34f07-1425-4b62-9eb0-70d1b197611c/ovnkube-controller/0.log" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.284420 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5f9zh_cfe34f07-1425-4b62-9eb0-70d1b197611c/ovn-acl-logging/0.log" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.285073 4576 generic.go:334] "Generic (PLEG): container finished" podID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerID="5649690ef9d0bed5577a03544e6450ad8893cdf676934590c2a0d8a423cae285" exitCode=1 Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.285117 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" event={"ID":"cfe34f07-1425-4b62-9eb0-70d1b197611c","Type":"ContainerDied","Data":"5649690ef9d0bed5577a03544e6450ad8893cdf676934590c2a0d8a423cae285"} Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.285911 4576 scope.go:117] "RemoveContainer" containerID="5649690ef9d0bed5577a03544e6450ad8893cdf676934590c2a0d8a423cae285" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.312682 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9942901-4be1-41db-b32c-9b996b72901f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88d9b58084ebfe06075e53fe77aec63e6c4e338aec6e1f31998d4bfcaca7ba77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac6d69cbfb86f4b717a66b46125d5d530049473a1f8b21e3be0d85c37ba5b837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshi
ft-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3dc0695bda2cc75c2ccf82050ca47b54244a19319c741a2368e448f635ee3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fdc3c90dc45378285ef9d57dceea95152497f0aefe12991a47354d11a0c838b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b49f6c1bad412759cfa18b56387ca168d26a69640f1c15c1b82040b73531bf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb6
8e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:28Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.315458 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.315487 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.315499 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.315515 4576 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.315526 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:28Z","lastTransitionTime":"2025-12-03T08:40:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.327704 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90118b09-8d4e-4eb5-831f-56e13fa31009\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799
488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290805e286eda8aff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:28Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.341800 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3038dcea3c147a1d523c7cd5c6ec049a499b39dda4de06d42324e6e8998334f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52333a05f039ec84c87b0170524958d01fbb7688666f98dd54700a6398ce3683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:28Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.353414 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:28Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.364063 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:28Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.374909 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7487354b977692f3cf1a8c2c9ede2bafac976404219c033a5c58f7695f039aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:28Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.386808 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:28Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.401496 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72bb738c-5ba1-4104-8729-1a929fa6d2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06ad4110fcb33e7b3cd2221f15d0b056660f9111f5ac3fd5f2633d4269ee553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,
\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disable
d\\\"}]},{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name
\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q8kww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:28Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.411971 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6qf5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a93555e6-d807-4db3-85c0-843f309e6efa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69aea80b1478f26af6f4835e1599afdb41d2a619ca44bfbdd35b1cdf664a7a2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mplbz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6qf5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:28Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.417968 4576 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.418003 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.418013 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.418028 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.418041 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:28Z","lastTransitionTime":"2025-12-03T08:40:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.421971 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xrjlb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc47ce4e85ba7755ceabf5b864579aee5ea67ffcc13719ac1068f6c46a5a7d3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlwv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xrjlb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:28Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.434628 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:28Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.446572 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff480bdd6b31fa53fe906aaa2459ccce17b507c4789f4ab6b64a8d2f286c9b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:28Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.459844 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jbxx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87ab08c02e31fb934248fab8c55b94006e54110d6176cd571dbefd774980d9aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9gm6g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jbxx2\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:28Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.471755 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60b1bede-26e9-4b5d-b450-9866da685693\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c6e7072fbdbb1574e02549ccd40206d728b36405b4c49b87e7f9fe11a8e0a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77fcbab4f8441fdeef4ff6f84326c1c5ab799faac0698120b8af9da36a524290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-pjb2d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:28Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.489647 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe34f07-1425-4b62-9eb0-70d1b197611c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\
\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://106766323b05884834bffc21cd45b77320e1bf62862139be3e9cfed8b004275e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:18Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ 
nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5649690ef9d0bed5577a03544e6450ad8893cdf676934590c2a0d8a423cae285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5649690ef9d0bed5577a03544e6450ad8893cdf676934590c2a0d8a423cae285\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:27Z\\\",\\\"message\\\":\\\"vices.LB{Name:\\\\\\\"Service_openshift-controller-manager-operator/metrics_TCP_cluste
r\\\\\\\", UUID:\\\\\\\"4607c9b7-15f9-4ba0-86e5-0021ba7e4488\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-controller-manager-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.58\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1203 08:40:27.368961 5728 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acc
ess-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5f9zh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:28Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.520268 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.520309 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.520321 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.520337 
4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.520348 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:28Z","lastTransitionTime":"2025-12-03T08:40:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.622894 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.623009 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.623028 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.623044 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.623054 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:28Z","lastTransitionTime":"2025-12-03T08:40:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.676639 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.676688 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.676727 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:40:28 crc kubenswrapper[4576]: E1203 08:40:28.676742 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:40:28 crc kubenswrapper[4576]: E1203 08:40:28.676926 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:40:28 crc kubenswrapper[4576]: E1203 08:40:28.677128 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.726144 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.726181 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.726191 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.726207 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.726219 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:28Z","lastTransitionTime":"2025-12-03T08:40:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.830262 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.830321 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.830345 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.830409 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.830437 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:28Z","lastTransitionTime":"2025-12-03T08:40:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.933080 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.933151 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.933175 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.933207 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:28 crc kubenswrapper[4576]: I1203 08:40:28.933229 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:28Z","lastTransitionTime":"2025-12-03T08:40:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.035790 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.035848 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.035867 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.035894 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.035913 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:29Z","lastTransitionTime":"2025-12-03T08:40:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.138269 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.138309 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.138318 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.138334 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.138343 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:29Z","lastTransitionTime":"2025-12-03T08:40:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.240633 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.240996 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.241011 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.241028 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.241039 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:29Z","lastTransitionTime":"2025-12-03T08:40:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.291019 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5f9zh_cfe34f07-1425-4b62-9eb0-70d1b197611c/ovnkube-controller/0.log" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.292835 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5f9zh_cfe34f07-1425-4b62-9eb0-70d1b197611c/ovn-acl-logging/0.log" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.293562 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" event={"ID":"cfe34f07-1425-4b62-9eb0-70d1b197611c","Type":"ContainerStarted","Data":"ada85ef4466c7ef967979027602ff555ef3717bd290edc5341cd81fd11f65bf2"} Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.293721 4576 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.311396 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9942901-4be1-41db-b32c-9b996b72901f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88d9b58084ebfe06075e53fe77aec63e6c4e338aec6e1f31998d4bfcaca7ba77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac6d69cbfb86f4b717a66b46125d5d530049473a1f8b21e3be0d85c37ba5b837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3dc0695bda2cc75c2ccf82050ca47b54244a19319c741a2368e448f635ee3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fdc3c90dc45378285ef9d57dceea95152497f0
aefe12991a47354d11a0c838b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b49f6c1bad412759cfa18b56387ca168d26a69640f1c15c1b82040b73531bf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:29Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.323207 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90118b09-8d4e-4eb5-831f-56e13fa31009\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290805e286eda8aff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:29Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.334889 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3038dcea3c147a1d523c7cd5c6ec049a499b39dda4de06d42324e6e8998334f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52333a05f039ec84c87b0170524958d01fbb7688666f98dd54700a6398ce3683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:29Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.343679 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.343713 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.343727 4576 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.343747 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.343761 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:29Z","lastTransitionTime":"2025-12-03T08:40:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.347676 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"im
age\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:29Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.359952 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:29Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.374315 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7487354b977692f3cf1a8c2c9ede2bafac976404219c033a5c58f7695f039aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:29Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.387332 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:29Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.402978 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72bb738c-5ba1-4104-8729-1a929fa6d2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06ad4110fcb33e7b3cd2221f15d0b056660f9111f5ac3fd5f2633d4269ee553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,
\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disable
d\\\"}]},{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name
\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q8kww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:29Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.412109 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6qf5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a93555e6-d807-4db3-85c0-843f309e6efa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69aea80b1478f26af6f4835e1599afdb41d2a619ca44bfbdd35b1cdf664a7a2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mplbz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6qf5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:29Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.421201 4576 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-xrjlb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc47ce4e85ba7755ceabf5b864579aee5ea67ffcc13719ac1068f6c46a5a7d3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlwv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xrjlb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:29Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.434633 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers 
with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:29Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.445593 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.445627 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.445637 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.445652 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.445663 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:29Z","lastTransitionTime":"2025-12-03T08:40:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.448363 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff480bdd6b31fa53fe906aaa2459ccce17b507c4789f4ab6b64a8d2f286c9b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:29Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.459408 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jbxx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87ab08c02e31fb934248fab8c55b94006e54110d6176cd571dbefd774980d9aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9gm6g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jbxx2\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:29Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.470100 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60b1bede-26e9-4b5d-b450-9866da685693\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c6e7072fbdbb1574e02549ccd40206d728b36405b4c49b87e7f9fe11a8e0a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77fcbab4f8441fdeef4ff6f84326c1c5ab799faac0698120b8af9da36a524290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-pjb2d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:29Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.487993 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe34f07-1425-4b62-9eb0-70d1b197611c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\
\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://106766323b05884834bffc21cd45b77320e1bf62862139be3e9cfed8b004275e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:18Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ 
nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ada85ef4466c7ef967979027602ff555ef3717bd290edc5341cd81fd11f65bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5649690ef9d0bed5577a03544e6450ad8893cdf676934590c2a0d8a423cae285\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:27Z\\\",\\\"message\\\":\\\"vices.LB{Name:\\\\\\\"Service_openshift-controller-manager-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"4607c9b7-15f9-4ba0-86e5-0021ba7e4488\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", 
ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-controller-manager-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.58\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1203 08:40:27.368961 5728 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:23Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\
":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5f9zh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:29Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.548554 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.548585 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.548593 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.548605 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.548615 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:29Z","lastTransitionTime":"2025-12-03T08:40:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.651261 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.651294 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.651304 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.651318 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.651327 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:29Z","lastTransitionTime":"2025-12-03T08:40:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.753586 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.753939 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.754085 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.754222 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.754408 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:29Z","lastTransitionTime":"2025-12-03T08:40:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.857050 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.857442 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.857677 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.857830 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.857972 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:29Z","lastTransitionTime":"2025-12-03T08:40:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.909004 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r47b6"] Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.909482 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r47b6" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.911577 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.916323 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.932801 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe34f07-1425-4b62-9eb0-70d1b197611c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://106766323b05884834bffc21cd45b77320e1bf62862139be3e9cfed8b004275e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:18Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ada85ef4466c7ef967979027602ff555ef3717bd290edc5341cd81fd11f65bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5649690ef9d0bed5577a03544e6450ad8893cdf676934590c2a0d8a423cae285\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:27Z\\\",\\\"message\\\":\\\"vices.LB{Name:\\\\\\\"Service_openshift-controller-manager-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"4607c9b7-15f9-4ba0-86e5-0021ba7e4488\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: 
[]services.LB{services.LB{Name:\\\\\\\"Service_openshift-controller-manager-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.58\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1203 08:40:27.368961 5728 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:23Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5f9zh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:29Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.949154 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xrjlb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc47ce4e85ba7755ceabf5b864579aee5ea67ffcc13719ac1068f6c46a5a7d3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlwv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xrjlb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:29Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.960928 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.960961 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.960972 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.960985 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.960996 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:29Z","lastTransitionTime":"2025-12-03T08:40:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.964657 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:29Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.977316 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff480bdd6b31fa53fe906aaa2459ccce17b507c4789f4ab6b64a8d2f286c9b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:29Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.984861 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/fba9d9c6-a657-4ac0-99e4-5ec5babb64ab-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-r47b6\" (UID: \"fba9d9c6-a657-4ac0-99e4-5ec5babb64ab\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r47b6" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.985227 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bfkfz\" (UniqueName: \"kubernetes.io/projected/fba9d9c6-a657-4ac0-99e4-5ec5babb64ab-kube-api-access-bfkfz\") pod \"ovnkube-control-plane-749d76644c-r47b6\" (UID: \"fba9d9c6-a657-4ac0-99e4-5ec5babb64ab\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r47b6" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.985393 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/fba9d9c6-a657-4ac0-99e4-5ec5babb64ab-env-overrides\") pod \"ovnkube-control-plane-749d76644c-r47b6\" (UID: \"fba9d9c6-a657-4ac0-99e4-5ec5babb64ab\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r47b6" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.985563 4576 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/fba9d9c6-a657-4ac0-99e4-5ec5babb64ab-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-r47b6\" (UID: \"fba9d9c6-a657-4ac0-99e4-5ec5babb64ab\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r47b6" Dec 03 08:40:29 crc kubenswrapper[4576]: I1203 08:40:29.991124 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jbxx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87ab08c02e31fb934248fab8c55b94006e54110d6176cd571dbefd774980d9aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\
\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9gm6g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jbxx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:29Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.001609 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60b1bede-26e9-4b5d-b450-9866da685693\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c6e7072fbdbb1574e02549ccd40206d728b36405b4c49b87e7f9fe11a8e0a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77fcbab4f8441fdeef4ff6f84326c1c5ab799faac0698120b8af9da36a524290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"
}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pjb2d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:29Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.013389 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r47b6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba9d9c6-a657-4ac0-99e4-5ec5babb64ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bfkfz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bfkfz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-r47b6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:30Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.039107 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9942901-4be1-41db-b32c-9b996b72901f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88d9b58084ebfe06075e53fe77aec63e6c4e338aec6e1f31998d4bfcaca7ba77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac6d69cbfb86f4b717a66b46125d5d530049473a1f8b21e3be0d85c37ba5b837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3dc0695bda2cc75c2ccf82050ca47b54244a19319c741a2368e448f635ee3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fdc3c90dc45378285ef9d57dceea95152497f0
aefe12991a47354d11a0c838b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b49f6c1bad412759cfa18b56387ca168d26a69640f1c15c1b82040b73531bf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:30Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.050473 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90118b09-8d4e-4eb5-831f-56e13fa31009\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290805e286eda8aff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:30Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.060371 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3038dcea3c147a1d523c7cd5c6ec049a499b39dda4de06d42324e6e8998334f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52333a05f039ec84c87b0170524958d01fbb7688666f98dd54700a6398ce3683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:30Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.062972 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.063013 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.063027 4576 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.063043 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.063063 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:30Z","lastTransitionTime":"2025-12-03T08:40:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.071453 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"im
age\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:30Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.081291 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:30Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.086471 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/fba9d9c6-a657-4ac0-99e4-5ec5babb64ab-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-r47b6\" (UID: \"fba9d9c6-a657-4ac0-99e4-5ec5babb64ab\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r47b6" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.086783 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bfkfz\" (UniqueName: \"kubernetes.io/projected/fba9d9c6-a657-4ac0-99e4-5ec5babb64ab-kube-api-access-bfkfz\") pod \"ovnkube-control-plane-749d76644c-r47b6\" (UID: \"fba9d9c6-a657-4ac0-99e4-5ec5babb64ab\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r47b6" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.086934 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/fba9d9c6-a657-4ac0-99e4-5ec5babb64ab-env-overrides\") pod \"ovnkube-control-plane-749d76644c-r47b6\" (UID: \"fba9d9c6-a657-4ac0-99e4-5ec5babb64ab\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r47b6" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.087054 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/fba9d9c6-a657-4ac0-99e4-5ec5babb64ab-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-r47b6\" (UID: \"fba9d9c6-a657-4ac0-99e4-5ec5babb64ab\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r47b6" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.087430 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" 
(UniqueName: \"kubernetes.io/configmap/fba9d9c6-a657-4ac0-99e4-5ec5babb64ab-env-overrides\") pod \"ovnkube-control-plane-749d76644c-r47b6\" (UID: \"fba9d9c6-a657-4ac0-99e4-5ec5babb64ab\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r47b6" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.087741 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/fba9d9c6-a657-4ac0-99e4-5ec5babb64ab-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-r47b6\" (UID: \"fba9d9c6-a657-4ac0-99e4-5ec5babb64ab\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r47b6" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.100044 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/fba9d9c6-a657-4ac0-99e4-5ec5babb64ab-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-r47b6\" (UID: \"fba9d9c6-a657-4ac0-99e4-5ec5babb64ab\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r47b6" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.100387 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7487354b977692f3cf1a8c2c9ede2bafac976404219c033a5c58f7695f039aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:30Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.107310 4576 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-bfkfz\" (UniqueName: \"kubernetes.io/projected/fba9d9c6-a657-4ac0-99e4-5ec5babb64ab-kube-api-access-bfkfz\") pod \"ovnkube-control-plane-749d76644c-r47b6\" (UID: \"fba9d9c6-a657-4ac0-99e4-5ec5babb64ab\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r47b6" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.113019 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:30Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.126474 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72bb738c-5ba1-4104-8729-1a929fa6d2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06ad4110fcb33e7b3cd2221f15d0b056660f9111f5ac3fd5f2633d4269ee553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q8kww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:30Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.137094 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6qf5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a93555e6-d807-4db3-85c0-843f309e6efa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69aea80b1478f26af6f4835e1599afdb41d2a619ca44bfbdd35b1cdf664a7a2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mplbz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6qf5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:30Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.166445 4576 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.166503 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.166520 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.166581 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.166600 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:30Z","lastTransitionTime":"2025-12-03T08:40:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.227021 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r47b6" Dec 03 08:40:30 crc kubenswrapper[4576]: W1203 08:40:30.248281 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfba9d9c6_a657_4ac0_99e4_5ec5babb64ab.slice/crio-32c6a06c679c3f189ecb33d4156a0219732a2e670797c6c0a9c2841795be149c WatchSource:0}: Error finding container 32c6a06c679c3f189ecb33d4156a0219732a2e670797c6c0a9c2841795be149c: Status 404 returned error can't find the container with id 32c6a06c679c3f189ecb33d4156a0219732a2e670797c6c0a9c2841795be149c Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.270592 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.270669 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.270695 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.270725 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.270747 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:30Z","lastTransitionTime":"2025-12-03T08:40:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.298302 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r47b6" event={"ID":"fba9d9c6-a657-4ac0-99e4-5ec5babb64ab","Type":"ContainerStarted","Data":"32c6a06c679c3f189ecb33d4156a0219732a2e670797c6c0a9c2841795be149c"} Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.300256 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5f9zh_cfe34f07-1425-4b62-9eb0-70d1b197611c/ovnkube-controller/1.log" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.301508 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5f9zh_cfe34f07-1425-4b62-9eb0-70d1b197611c/ovnkube-controller/0.log" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.308924 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5f9zh_cfe34f07-1425-4b62-9eb0-70d1b197611c/ovn-acl-logging/0.log" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.309740 4576 generic.go:334] "Generic (PLEG): container finished" podID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerID="ada85ef4466c7ef967979027602ff555ef3717bd290edc5341cd81fd11f65bf2" exitCode=1 Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.309784 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" event={"ID":"cfe34f07-1425-4b62-9eb0-70d1b197611c","Type":"ContainerDied","Data":"ada85ef4466c7ef967979027602ff555ef3717bd290edc5341cd81fd11f65bf2"} Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.309890 4576 scope.go:117] "RemoveContainer" containerID="5649690ef9d0bed5577a03544e6450ad8893cdf676934590c2a0d8a423cae285" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.310681 4576 scope.go:117] "RemoveContainer" containerID="ada85ef4466c7ef967979027602ff555ef3717bd290edc5341cd81fd11f65bf2" Dec 03 08:40:30 crc kubenswrapper[4576]: E1203 08:40:30.310937 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-5f9zh_openshift-ovn-kubernetes(cfe34f07-1425-4b62-9eb0-70d1b197611c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.322552 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xrjlb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc47ce4e85ba7755ceabf5b864579aee5ea67ffcc13719ac1068f6c46a5a7d3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlwv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xrjlb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:30Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.334989 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:30Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.347583 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff480bdd6b31fa53fe906aaa2459ccce17b507c4789f4ab6b64a8d2f286c9b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:30Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.360510 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jbxx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87ab08c02e31fb934248fab8c55b94006e54110d6176cd571dbefd774980d9aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/ser
viceaccount\\\",\\\"name\\\":\\\"kube-api-access-9gm6g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jbxx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:30Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.371216 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60b1bede-26e9-4b5d-b450-9866da685693\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c6e7072fbdbb1574e02549ccd40206d728b36405b4c49b87e7f9fe11a8e0a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77fcbab4f8441fdeef4ff6f84326c1c5ab799faac0698120b8af9da36a524290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\
\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pjb2d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:30Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.375686 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.375738 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.375751 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.375771 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.375785 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:30Z","lastTransitionTime":"2025-12-03T08:40:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.391151 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe34f07-1425-4b62-9eb0-70d1b197611c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://106766323b05884834bffc21cd45b77320e1bf62862139be3e9cfed8b004275e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:18Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ 
local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ada85ef4466c7ef967979027602ff555ef3717bd290edc5341cd81fd11f65bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5649690ef9d0bed5577a03544e6450ad8893cdf676934590c2a0d8a423cae285\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:27Z\\\",\\\"message\\\":\\\"vices.LB{Name:\\\\\\\"Service_openshift-controller-manager-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"4607c9b7-15f9-4ba0-86e5-0021ba7e4488\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, 
Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-controller-manager-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.58\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1203 08:40:27.368961 5728 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:23Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ada85ef4466c7ef967979027602ff555ef3717bd290edc5341cd81fd11f65bf2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:29Z\\\",\\\"message\\\":\\\"mns:[] Mutations:[{Column:policies Mutator:insert Value:{GoSet:[{GoUUID:a5a72d02-1a0f-4f7f-a8c5-6923a1c4274a}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f6d604c1-9711-4e25-be6c-79ec28bbad1b}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 08:40:29.877056 5919 address_set.go:302] New(0d39bc5c-d5b9-432c-81be-2275bce5d7aa/default-network-controller:EgressIP:node-ips:v4:default/a712973235162149816) with []\\\\nI1203 08:40:29.877090 5919 address_set.go:302] New(aa6fc2dc-fab0-4812-b9da-809058e4dcf7/default-network-controller:EgressIP:egressip-served-pods:v4:default/a8519615025667110816) with []\\\\nI1203 08:40:29.877104 5919 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI1203 08:40:29.877141 5919 factory.go:1336] Added *v1.Node event handler 7\\\\nI1203 08:40:29.877160 5919 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1203 08:40:29.877419 5919 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1203 08:40:29.877514 5919 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1203 08:40:29.877552 5919 ovnkube.go:599] Stopped ovnkube\\\\nI1203 08:40:29.877569 5919 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1203 08:40:29.877621 5919 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd4
7ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5f9zh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:30Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.412696 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9942901-4be1-41db-b32c-9b996b72901f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88d9b58084ebfe06075e53fe77aec63e6c4e338aec6e1f31998d4bfcaca7ba77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac6d69cbfb86f4b717a66b46125d5d530049473a1f8b21e3be0d85c37ba5b8
37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3dc0695bda2cc75c2ccf82050ca47b54244a19319c741a2368e448f635ee3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fdc3c90dc45378285ef9d57dceea95152497f0aefe12991a47354d11a0c838b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b49f6c1bad412759cfa18b56387ca168d26a69640f1c15c1b82040b73531bf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:30Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.425902 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90118b09-8d4e-4eb5-831f-56e13fa31009\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290805e286eda8aff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:30Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.437805 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3038dcea3c147a1d523c7cd5c6ec049a499b39dda4de06d42324e6e8998334f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52333a05f039ec84c87b0170524958d01fbb7688666f98dd54700a6398ce3683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:30Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.448445 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r47b6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba9d9c6-a657-4ac0-99e4-5ec5babb64ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bfkfz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bfkfz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-r47b6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:30Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.461660 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:30Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.474725 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:30Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.481391 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.481426 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.481435 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.481449 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.481459 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:30Z","lastTransitionTime":"2025-12-03T08:40:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.488247 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7487354b977692f3cf1a8c2c9ede2bafac976404219c033a5c58f7695f039aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:30Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.500775 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:30Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.519966 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72bb738c-5ba1-4104-8729-1a929fa6d2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06ad4110fcb33e7b3cd2221f15d0b056660f9111f5ac3fd5f2633d4269ee553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"container
ID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true
,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q8kww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:30Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.531187 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6qf5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a93555e6-d807-4db3-85c0-843f309e6efa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69aea80b1478f26af6f4835e1599afdb41d2a619ca44bfbdd35b1cdf664a7a2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mplbz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-6qf5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:30Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.583630 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.583672 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.583687 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.583711 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.583726 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:30Z","lastTransitionTime":"2025-12-03T08:40:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.676231 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.676274 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:40:30 crc kubenswrapper[4576]: E1203 08:40:30.676357 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.676367 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:40:30 crc kubenswrapper[4576]: E1203 08:40:30.676426 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:40:30 crc kubenswrapper[4576]: E1203 08:40:30.676540 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.685357 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.685396 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.685404 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.685418 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.685428 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:30Z","lastTransitionTime":"2025-12-03T08:40:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.710743 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.710787 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.710797 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.710825 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.710835 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:30Z","lastTransitionTime":"2025-12-03T08:40:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:30 crc kubenswrapper[4576]: E1203 08:40:30.722917 4576 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148060Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608860Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"86dcb870-937a-4231-a14c-053b8b425329\\\",\\\"systemUUID\\\":\\\"da5fb8e3-5a19-4f2d-831e-00b6f563dbea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:30Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.726780 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.726823 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.726835 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.726851 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.726864 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:30Z","lastTransitionTime":"2025-12-03T08:40:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:30 crc kubenswrapper[4576]: E1203 08:40:30.740268 4576 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148060Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608860Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"86dcb870-937a-4231-a14c-053b8b425329\\\",\\\"systemUUID\\\":\\\"da5fb8e3-5a19-4f2d-831e-00b6f563dbea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:30Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.744129 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.744173 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.744186 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.744204 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.744218 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:30Z","lastTransitionTime":"2025-12-03T08:40:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:30 crc kubenswrapper[4576]: E1203 08:40:30.755681 4576 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148060Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608860Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"86dcb870-937a-4231-a14c-053b8b425329\\\",\\\"systemUUID\\\":\\\"da5fb8e3-5a19-4f2d-831e-00b6f563dbea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:30Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.759304 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.759350 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.759363 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.759382 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.759394 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:30Z","lastTransitionTime":"2025-12-03T08:40:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:30 crc kubenswrapper[4576]: E1203 08:40:30.771463 4576 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148060Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608860Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"86dcb870-937a-4231-a14c-053b8b425329\\\",\\\"systemUUID\\\":\\\"da5fb8e3-5a19-4f2d-831e-00b6f563dbea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:30Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.775516 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.775584 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.775596 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.775613 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.775624 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:30Z","lastTransitionTime":"2025-12-03T08:40:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:30 crc kubenswrapper[4576]: E1203 08:40:30.789893 4576 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148060Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608860Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"86dcb870-937a-4231-a14c-053b8b425329\\\",\\\"systemUUID\\\":\\\"da5fb8e3-5a19-4f2d-831e-00b6f563dbea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:30Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:30 crc kubenswrapper[4576]: E1203 08:40:30.790056 4576 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.791752 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.791791 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.791803 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.791821 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.791833 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:30Z","lastTransitionTime":"2025-12-03T08:40:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.794127 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.794287 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:40:30 crc kubenswrapper[4576]: E1203 08:40:30.794332 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:40:46.794295753 +0000 UTC m=+54.180272737 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.794408 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:40:30 crc kubenswrapper[4576]: E1203 08:40:30.794455 4576 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.794476 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:40:30 crc kubenswrapper[4576]: E1203 08:40:30.794493 4576 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 08:40:30 crc kubenswrapper[4576]: E1203 08:40:30.794493 4576 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 08:40:30 crc kubenswrapper[4576]: E1203 08:40:30.794509 4576 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 08:40:30 crc kubenswrapper[4576]: E1203 08:40:30.794517 4576 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 08:40:30 crc kubenswrapper[4576]: E1203 08:40:30.794556 4576 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.794560 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:40:30 crc kubenswrapper[4576]: E1203 08:40:30.794567 4576 configmap.go:193] 
Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 08:40:30 crc kubenswrapper[4576]: E1203 08:40:30.794603 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 08:40:46.794580751 +0000 UTC m=+54.180557735 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 08:40:30 crc kubenswrapper[4576]: E1203 08:40:30.794623 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 08:40:46.794615831 +0000 UTC m=+54.180592885 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 08:40:30 crc kubenswrapper[4576]: E1203 08:40:30.794639 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 08:40:46.794630402 +0000 UTC m=+54.180607486 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 08:40:30 crc kubenswrapper[4576]: E1203 08:40:30.794671 4576 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 08:40:30 crc kubenswrapper[4576]: E1203 08:40:30.794710 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 08:40:46.794703014 +0000 UTC m=+54.180679998 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.894834 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.894879 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.894889 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.894903 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.894915 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:30Z","lastTransitionTime":"2025-12-03T08:40:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.998324 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.998781 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.998985 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.999127 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:30 crc kubenswrapper[4576]: I1203 08:40:30.999248 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:30Z","lastTransitionTime":"2025-12-03T08:40:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.101635 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.101698 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.101716 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.101739 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.101755 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:31Z","lastTransitionTime":"2025-12-03T08:40:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.204100 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.204333 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.204436 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.204515 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.204608 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:31Z","lastTransitionTime":"2025-12-03T08:40:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.307785 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.307816 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.307825 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.307839 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.307848 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:31Z","lastTransitionTime":"2025-12-03T08:40:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.314027 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5f9zh_cfe34f07-1425-4b62-9eb0-70d1b197611c/ovnkube-controller/1.log" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.316997 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5f9zh_cfe34f07-1425-4b62-9eb0-70d1b197611c/ovn-acl-logging/0.log" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.319556 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r47b6" event={"ID":"fba9d9c6-a657-4ac0-99e4-5ec5babb64ab","Type":"ContainerStarted","Data":"502bb671eef9e8e245ba86484a31e8009e90df1c55cc320e8b7bcf227cdd9ae7"} Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.319598 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r47b6" event={"ID":"fba9d9c6-a657-4ac0-99e4-5ec5babb64ab","Type":"ContainerStarted","Data":"9d5b6a2b0818d8e906110415190bed49348e53d7d987987ce0aa699a7d46378b"} Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.334095 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72bb738c-5ba1-4104-8729-1a929fa6d2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06ad4110fcb33e7b3cd2221f15d0b056660f9111f5ac3fd5f2633d4269ee553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"
readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:24Z\
\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q8kww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:31Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.348501 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6qf5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a93555e6-d807-4db3-85c0-843f309e6efa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69aea80b1478f26af6f4835e1599afdb41d2a619ca44bfbdd35b1cdf664a7a2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mplbz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6qf5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-12-03T08:40:31Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.361828 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7487354b977692f3cf1a8c2c9ede2bafac976404219c033a5c58f7695f039aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:31Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.372787 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:31Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.384281 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jbxx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87ab08c02e31fb934248fab8c55b94006e54110d6176cd571dbefd774980d9aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cn
i/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9gm6g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jbxx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:31Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.392945 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-pw7pk"] Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.393386 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:40:31 crc kubenswrapper[4576]: E1203 08:40:31.393454 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.397274 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60b1bede-26e9-4b5d-b450-9866da685693\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c6e7072fbdbb1574e02549ccd40206d728b36405b4c49b87e7f9fe11a8e0a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77fcbab4f8441fdeef4ff6f84326c1c5ab799faac0698120b8af9da36a524290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pjb2d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:31Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.410669 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.410746 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.410761 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.410775 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.410785 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:31Z","lastTransitionTime":"2025-12-03T08:40:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.419839 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe34f07-1425-4b62-9eb0-70d1b197611c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://106766323b05884834bffc21cd45b77320e1bf62862139be3e9cfed8b004275e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:18Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ada85ef4466c7ef967979027602ff555ef3717bd290edc5341cd81fd11f65bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5649690ef9d0bed5577a03544e6450ad8893cdf676934590c2a0d8a423cae285\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:27Z\\\",\\\"message\\\":\\\"vices.LB{Name:\\\\\\\"Service_openshift-controller-manager-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"4607c9b7-15f9-4ba0-86e5-0021ba7e4488\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: 
[]services.LB{services.LB{Name:\\\\\\\"Service_openshift-controller-manager-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.58\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1203 08:40:27.368961 5728 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:23Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ada85ef4466c7ef967979027602ff555ef3717bd290edc5341cd81fd11f65bf2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:29Z\\\",\\\"message\\\":\\\"mns:[] Mutations:[{Column:policies Mutator:insert Value:{GoSet:[{GoUUID:a5a72d02-1a0f-4f7f-a8c5-6923a1c4274a}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f6d604c1-9711-4e25-be6c-79ec28bbad1b}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 08:40:29.877056 5919 address_set.go:302] New(0d39bc5c-d5b9-432c-81be-2275bce5d7aa/default-network-controller:EgressIP:node-ips:v4:default/a712973235162149816) with []\\\\nI1203 08:40:29.877090 5919 address_set.go:302] New(aa6fc2dc-fab0-4812-b9da-809058e4dcf7/default-network-controller:EgressIP:egressip-served-pods:v4:default/a8519615025667110816) with []\\\\nI1203 08:40:29.877104 5919 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI1203 08:40:29.877141 5919 factory.go:1336] Added *v1.Node event handler 7\\\\nI1203 08:40:29.877160 5919 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1203 08:40:29.877419 5919 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1203 08:40:29.877514 5919 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1203 08:40:29.877552 5919 ovnkube.go:599] Stopped ovnkube\\\\nI1203 08:40:29.877569 5919 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1203 08:40:29.877621 5919 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd4
7ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5f9zh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:31Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.429674 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xrjlb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc47ce4e85ba7755ceabf5b864579aee5ea67ffcc13719ac1068f6c46a5a7d3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlwv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"1
92.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xrjlb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:31Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.444338 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:31Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.805989 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c87be72e-a53b-42c9-bb32-f56cd0febe24-metrics-certs\") pod \"network-metrics-daemon-pw7pk\" (UID: \"c87be72e-a53b-42c9-bb32-f56cd0febe24\") " pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.806107 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vvsfj\" (UniqueName: \"kubernetes.io/projected/c87be72e-a53b-42c9-bb32-f56cd0febe24-kube-api-access-vvsfj\") pod \"network-metrics-daemon-pw7pk\" (UID: \"c87be72e-a53b-42c9-bb32-f56cd0febe24\") " pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.806358 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff480bdd6b31fa53fe906aaa2459ccce17b507c4789f4ab6b64a8d2f286c9b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:31Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.806482 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:40:31 crc kubenswrapper[4576]: E1203 08:40:31.806657 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.806858 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.806888 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.806902 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.806916 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.806927 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:31Z","lastTransitionTime":"2025-12-03T08:40:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.807029 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:40:31 crc kubenswrapper[4576]: E1203 08:40:31.807115 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.823936 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90118b09-8d4e-4eb5-831f-56e13fa31009\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/st
atic-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290805e286eda8aff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:31Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.839958 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3038dcea3c147a1d523c7cd5c6ec049a499b39dda4de06d42324e6e8998334f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52333a05f039ec84c87b0170524958d01fbb7688666f98dd54700a6398ce3683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:31Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.854626 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r47b6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba9d9c6-a657-4ac0-99e4-5ec5babb64ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d5b6a2b0818d8e906110415190bed49348e53d7d987987ce0aa699a7d46378b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bfkfz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://502bb671eef9e8e245ba86484a31e8009e90df1c55cc320e8b7bcf227cdd9ae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bfkfz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-r47b6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:31Z is after 2025-08-24T17:21:41Z" Dec 03 
08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.873505 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9942901-4be1-41db-b32c-9b996b72901f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88d9b58084ebfe06075e53fe77aec63e6c4e338aec6e1f31998d4bfcaca7ba77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac6d69cbfb86f4b717a66b46125d5d530049473a1f8b21e3be0d85c37ba5b837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3dc0695bda2cc75c2ccf82050ca47b54244a19319c741a2368e448f635ee3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fdc3c90dc45378285ef9d57dceea95152497f0aefe12991a47354d11a0c838b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b49f6c1bad412759cfa18b56387ca168d26a69640f1c15c1b82040b73531bf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:31Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.885244 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:31Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.897778 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}
},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 
genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:31Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.906809 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c87be72e-a53b-42c9-bb32-f56cd0febe24-metrics-certs\") pod \"network-metrics-daemon-pw7pk\" (UID: \"c87be72e-a53b-42c9-bb32-f56cd0febe24\") " pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.906868 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vvsfj\" (UniqueName: 
\"kubernetes.io/projected/c87be72e-a53b-42c9-bb32-f56cd0febe24-kube-api-access-vvsfj\") pod \"network-metrics-daemon-pw7pk\" (UID: \"c87be72e-a53b-42c9-bb32-f56cd0febe24\") " pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:40:31 crc kubenswrapper[4576]: E1203 08:40:31.907000 4576 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 08:40:31 crc kubenswrapper[4576]: E1203 08:40:31.907075 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c87be72e-a53b-42c9-bb32-f56cd0febe24-metrics-certs podName:c87be72e-a53b-42c9-bb32-f56cd0febe24 nodeName:}" failed. No retries permitted until 2025-12-03 08:40:32.407055272 +0000 UTC m=+39.793032256 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c87be72e-a53b-42c9-bb32-f56cd0febe24-metrics-certs") pod "network-metrics-daemon-pw7pk" (UID: "c87be72e-a53b-42c9-bb32-f56cd0febe24") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.908577 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.908700 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.908715 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.908774 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.908794 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:31Z","lastTransitionTime":"2025-12-03T08:40:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.910664 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3038dcea3c147a1d523c7cd5c6ec049a499b39dda4de06d42324e6e8998334f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52333a05f039ec84c87b0170524958d01fbb7688666f98dd54700a6398ce3683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:31Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.922249 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vvsfj\" (UniqueName: 
\"kubernetes.io/projected/c87be72e-a53b-42c9-bb32-f56cd0febe24-kube-api-access-vvsfj\") pod \"network-metrics-daemon-pw7pk\" (UID: \"c87be72e-a53b-42c9-bb32-f56cd0febe24\") " pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.922601 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r47b6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba9d9c6-a657-4ac0-99e4-5ec5babb64ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d5b6a2b0818d8e906110415190bed49348e53d7d987987ce0aa699a7d46378b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bfkfz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://502bb671eef9e8e245ba86484a31e8009e90df1c55cc320e8b7bcf227cdd9ae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bfkfz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"sta
rtTime\\\":\\\"2025-12-03T08:40:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-r47b6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:31Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.939991 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9942901-4be1-41db-b32c-9b996b72901f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88d9b58084ebfe06075e53fe77aec63e6c4e338aec6e1f31998d4bfcaca7ba77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac6d69cbfb86f4b717a66b46125d5d530049473a1f8b21e3be0d85c37ba5b837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3dc0695bda2cc75c2ccf82050ca47b54244a19319c741a2368e448f635ee3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f5
8408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fdc3c90dc45378285ef9d57dceea95152497f0aefe12991a47354d11a0c838b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b49f6c1bad412759cfa18b56387ca168d26a69640f1c15c1b82040b73531bf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/opens
hift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:31Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.952332 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90118b09-8d4e-4eb5-831f-56e13fa31009\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290805e286eda8aff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:31Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.966388 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:31Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.975906 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:31Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:31 crc kubenswrapper[4576]: I1203 08:40:31.987971 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6qf5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a93555e6-d807-4db3-85c0-843f309e6efa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69aea80b1478f26af6f4835e1599afdb41d2a619ca44bfbdd35b1cdf664a7a2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mplbz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6qf5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:31Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.003371 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7487354b977692f3cf1a8c2c9ede2bafac976404219c033a5c58f7695f039aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:32Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.011012 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.011144 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.011213 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.011284 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.011347 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:32Z","lastTransitionTime":"2025-12-03T08:40:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.016969 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:32Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.033462 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"72bb738c-5ba1-4104-8729-1a929fa6d2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06ad4110fcb33e7b3cd2221f15d0b056660f9111f5ac3fd5f2633d4269ee553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q8kww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:32Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.046109 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60b1bede-26e9-4b5d-b450-9866da685693\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c6e7072fbdbb1574e02549ccd40206d728b36405b4c49b87e7f9fe11a8e0a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77fcbab4f8441fdeef4ff6f84326c1c5ab799faac0698120b8af9da36a524290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pjb2d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:32Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.069145 4576 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe34f07-1425-4b62-9eb0-70d1b197611c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://106766323b05884834bffc21cd45b77320e1bf62862139be3e9cfed8b004275e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:18Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: 
/var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ada85ef4466c7ef967979027602ff555ef3717bd290edc5341cd81fd11f65bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5649690ef9d0bed5577a03544e6450ad8893cdf676934590c2a0d8a423cae285\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:27Z\\\",\\\"message\\\":\\\"vices.LB{Name:\\\\\\\"Service_openshift-controller-manager-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"4607c9b7-15f9-4ba0-86e5-0021ba7e4488\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: 
[]services.LB{services.LB{Name:\\\\\\\"Service_openshift-controller-manager-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.58\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1203 08:40:27.368961 5728 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:23Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ada85ef4466c7ef967979027602ff555ef3717bd290edc5341cd81fd11f65bf2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:29Z\\\",\\\"message\\\":\\\"mns:[] Mutations:[{Column:policies Mutator:insert Value:{GoSet:[{GoUUID:a5a72d02-1a0f-4f7f-a8c5-6923a1c4274a}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f6d604c1-9711-4e25-be6c-79ec28bbad1b}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 08:40:29.877056 5919 address_set.go:302] New(0d39bc5c-d5b9-432c-81be-2275bce5d7aa/default-network-controller:EgressIP:node-ips:v4:default/a712973235162149816) with []\\\\nI1203 08:40:29.877090 5919 address_set.go:302] New(aa6fc2dc-fab0-4812-b9da-809058e4dcf7/default-network-controller:EgressIP:egressip-served-pods:v4:default/a8519615025667110816) with []\\\\nI1203 08:40:29.877104 5919 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI1203 08:40:29.877141 5919 factory.go:1336] Added *v1.Node event handler 7\\\\nI1203 08:40:29.877160 5919 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1203 08:40:29.877419 5919 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1203 08:40:29.877514 5919 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1203 08:40:29.877552 5919 ovnkube.go:599] Stopped ovnkube\\\\nI1203 08:40:29.877569 5919 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1203 08:40:29.877621 5919 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd4
7ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5f9zh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:32Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.082973 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xrjlb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc47ce4e85ba7755ceabf5b864579aee5ea67ffcc13719ac1068f6c46a5a7d3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlwv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"1
92.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xrjlb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:32Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.099272 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pw7pk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c87be72e-a53b-42c9-bb32-f56cd0febe24\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvsfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvsfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:31Z\\\"}}\" for pod 
\"openshift-multus\"/\"network-metrics-daemon-pw7pk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:32Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.114337 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.114393 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.114408 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.114427 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.114441 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:32Z","lastTransitionTime":"2025-12-03T08:40:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.118982 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:32Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.134821 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff480bdd6b31fa53fe906aaa2459ccce17b507c4789f4ab6b64a8d2f286c9b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:32Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.153733 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jbxx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87ab08c02e31fb934248fab8c55b94006e54110d6176cd571dbefd774980d9aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9gm6g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jbxx2\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:32Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.217210 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.217243 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.217251 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.217265 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.217275 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:32Z","lastTransitionTime":"2025-12-03T08:40:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.319817 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.320389 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.320551 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.320671 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.320766 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:32Z","lastTransitionTime":"2025-12-03T08:40:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.410927 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c87be72e-a53b-42c9-bb32-f56cd0febe24-metrics-certs\") pod \"network-metrics-daemon-pw7pk\" (UID: \"c87be72e-a53b-42c9-bb32-f56cd0febe24\") " pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:40:32 crc kubenswrapper[4576]: E1203 08:40:32.411065 4576 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 08:40:32 crc kubenswrapper[4576]: E1203 08:40:32.411117 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c87be72e-a53b-42c9-bb32-f56cd0febe24-metrics-certs podName:c87be72e-a53b-42c9-bb32-f56cd0febe24 nodeName:}" failed. 
No retries permitted until 2025-12-03 08:40:33.411102605 +0000 UTC m=+40.797079589 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c87be72e-a53b-42c9-bb32-f56cd0febe24-metrics-certs") pod "network-metrics-daemon-pw7pk" (UID: "c87be72e-a53b-42c9-bb32-f56cd0febe24") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.423739 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.423764 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.423772 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.423785 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.423794 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:32Z","lastTransitionTime":"2025-12-03T08:40:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.526484 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.526557 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.526571 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.526586 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.526597 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:32Z","lastTransitionTime":"2025-12-03T08:40:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.629916 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.630066 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.630168 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.630350 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.630435 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:32Z","lastTransitionTime":"2025-12-03T08:40:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.676207 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.676215 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:40:32 crc kubenswrapper[4576]: E1203 08:40:32.676340 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:40:32 crc kubenswrapper[4576]: E1203 08:40:32.676444 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.733111 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.733647 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.733811 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.733941 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.734157 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:32Z","lastTransitionTime":"2025-12-03T08:40:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.836258 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.836316 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.836337 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.836380 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.836409 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:32Z","lastTransitionTime":"2025-12-03T08:40:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.939160 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.939203 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.939214 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.939230 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:32 crc kubenswrapper[4576]: I1203 08:40:32.939242 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:32Z","lastTransitionTime":"2025-12-03T08:40:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.041957 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.042017 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.042036 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.042058 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.042072 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:33Z","lastTransitionTime":"2025-12-03T08:40:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.144819 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.144859 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.144872 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.144888 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.144901 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:33Z","lastTransitionTime":"2025-12-03T08:40:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.247374 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.247416 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.247425 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.247441 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.247452 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:33Z","lastTransitionTime":"2025-12-03T08:40:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.349896 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.349957 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.349970 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.349986 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.349995 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:33Z","lastTransitionTime":"2025-12-03T08:40:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.421998 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c87be72e-a53b-42c9-bb32-f56cd0febe24-metrics-certs\") pod \"network-metrics-daemon-pw7pk\" (UID: \"c87be72e-a53b-42c9-bb32-f56cd0febe24\") " pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:40:33 crc kubenswrapper[4576]: E1203 08:40:33.422192 4576 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 08:40:33 crc kubenswrapper[4576]: E1203 08:40:33.422302 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c87be72e-a53b-42c9-bb32-f56cd0febe24-metrics-certs podName:c87be72e-a53b-42c9-bb32-f56cd0febe24 nodeName:}" failed. No retries permitted until 2025-12-03 08:40:35.422279581 +0000 UTC m=+42.808256585 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c87be72e-a53b-42c9-bb32-f56cd0febe24-metrics-certs") pod "network-metrics-daemon-pw7pk" (UID: "c87be72e-a53b-42c9-bb32-f56cd0febe24") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.453105 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.453172 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.453186 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.453209 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.453220 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:33Z","lastTransitionTime":"2025-12-03T08:40:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.556116 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.556163 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.556175 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.556191 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.556204 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:33Z","lastTransitionTime":"2025-12-03T08:40:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.658698 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.658748 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.658759 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.658776 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.658789 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:33Z","lastTransitionTime":"2025-12-03T08:40:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.677080 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.677080 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:40:33 crc kubenswrapper[4576]: E1203 08:40:33.677278 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:40:33 crc kubenswrapper[4576]: E1203 08:40:33.677416 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.694388 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resourc
es\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:33Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.710240 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:33Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.722953 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7487354b977692f3cf1a8c2c9ede2bafac976404219c033a5c58f7695f039aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:33Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.735590 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:33Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.749190 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72bb738c-5ba1-4104-8729-1a929fa6d2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06ad4110fcb33e7b3cd2221f15d0b056660f9111f5ac3fd5f2633d4269ee553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,
\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disable
d\\\"}]},{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name
\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q8kww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:33Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.760145 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6qf5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a93555e6-d807-4db3-85c0-843f309e6efa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69aea80b1478f26af6f4835e1599afdb41d2a619ca44bfbdd35b1cdf664a7a2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mplbz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6qf5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:33Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.762445 4576 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.762490 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.762509 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.762547 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.762561 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:33Z","lastTransitionTime":"2025-12-03T08:40:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.771892 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pw7pk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c87be72e-a53b-42c9-bb32-f56cd0febe24\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvsfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvsfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:31Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pw7pk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:33Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.784451 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:33Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.800387 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff480bdd6b31fa53fe906aaa2459ccce17b507c4789f4ab6b64a8d2f286c9b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:33Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.814342 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jbxx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87ab08c02e31fb934248fab8c55b94006e54110d6176cd571dbefd774980d9aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9gm6g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jbxx2\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:33Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.826420 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60b1bede-26e9-4b5d-b450-9866da685693\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c6e7072fbdbb1574e02549ccd40206d728b36405b4c49b87e7f9fe11a8e0a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77fcbab4f8441fdeef4ff6f84326c1c5ab799faac0698120b8af9da36a524290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-pjb2d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:33Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.850899 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe34f07-1425-4b62-9eb0-70d1b197611c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\
\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://106766323b05884834bffc21cd45b77320e1bf62862139be3e9cfed8b004275e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:18Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ 
nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ada85ef4466c7ef967979027602ff555ef3717bd290edc5341cd81fd11f65bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5649690ef9d0bed5577a03544e6450ad8893cdf676934590c2a0d8a423cae285\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:27Z\\\",\\\"message\\\":\\\"vices.LB{Name:\\\\\\\"Service_openshift-controller-manager-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"4607c9b7-15f9-4ba0-86e5-0021ba7e4488\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", 
ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-controller-manager-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.58\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1203 08:40:27.368961 5728 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:23Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ada85ef4466c7ef967979027602ff555ef3717bd290edc5341cd81fd11f65bf2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:29Z\\\",\\\"message\\\":\\\"mns:[] Mutations:[{Column:policies Mutator:insert Value:{GoSet:[{GoUUID:a5a72d02-1a0f-4f7f-a8c5-6923a1c4274a}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f6d604c1-9711-4e25-be6c-79ec28bbad1b}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 08:40:29.877056 5919 address_set.go:302] New(0d39bc5c-d5b9-432c-81be-2275bce5d7aa/default-network-controller:EgressIP:node-ips:v4:default/a712973235162149816) with []\\\\nI1203 08:40:29.877090 5919 address_set.go:302] New(aa6fc2dc-fab0-4812-b9da-809058e4dcf7/default-network-controller:EgressIP:egressip-served-pods:v4:default/a8519615025667110816) with []\\\\nI1203 08:40:29.877104 5919 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI1203 08:40:29.877141 5919 factory.go:1336] Added *v1.Node event handler 7\\\\nI1203 08:40:29.877160 5919 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1203 08:40:29.877419 5919 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1203 08:40:29.877514 5919 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1203 08:40:29.877552 5919 ovnkube.go:599] Stopped ovnkube\\\\nI1203 08:40:29.877569 5919 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1203 08:40:29.877621 5919 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd4
7ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5f9zh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:33Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.864836 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xrjlb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc47ce4e85ba7755ceabf5b864579aee5ea67ffcc13719ac1068f6c46a5a7d3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlwv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"1
92.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xrjlb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:33Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.865160 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.865181 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.865192 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.865208 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.865219 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:33Z","lastTransitionTime":"2025-12-03T08:40:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.891322 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9942901-4be1-41db-b32c-9b996b72901f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88d9b58084ebfe06075e53fe77aec63e6c4e338aec6e1f31998d4bfcaca7ba77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac6d69cbfb86f4b717a66b46125d5d530049473a1f8b21e3be0d85c37ba5b837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3dc0695bda2cc75c2ccf82050ca47b54244a19319c741a2368e448f635ee3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fdc3c90dc45378285ef9d57dceea95152497f0
aefe12991a47354d11a0c838b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b49f6c1bad412759cfa18b56387ca168d26a69640f1c15c1b82040b73531bf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:33Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.907324 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90118b09-8d4e-4eb5-831f-56e13fa31009\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290805e286eda8aff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:33Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.921557 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3038dcea3c147a1d523c7cd5c6ec049a499b39dda4de06d42324e6e8998334f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52333a05f039ec84c87b0170524958d01fbb7688666f98dd54700a6398ce3683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:33Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.937449 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r47b6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba9d9c6-a657-4ac0-99e4-5ec5babb64ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d5b6a2b0818d8e906110415190bed49348e53d7d987987ce0aa699a7d46378b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bfkfz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://502bb671eef9e8e245ba86484a31e8009e90df1c55cc320e8b7bcf227cdd9ae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bfkfz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-r47b6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:33Z is after 2025-08-24T17:21:41Z" Dec 03 
08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.967596 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.967678 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.967699 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.967720 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:33 crc kubenswrapper[4576]: I1203 08:40:33.967734 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:33Z","lastTransitionTime":"2025-12-03T08:40:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.070973 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.071022 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.071034 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.071053 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.071066 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:34Z","lastTransitionTime":"2025-12-03T08:40:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.174581 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.174626 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.174637 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.174654 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.174668 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:34Z","lastTransitionTime":"2025-12-03T08:40:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.277466 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.277505 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.277513 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.277544 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.277557 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:34Z","lastTransitionTime":"2025-12-03T08:40:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.380251 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.380324 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.380339 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.380365 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.380383 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:34Z","lastTransitionTime":"2025-12-03T08:40:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.484115 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.484432 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.484676 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.484852 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.485044 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:34Z","lastTransitionTime":"2025-12-03T08:40:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.588755 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.589110 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.589321 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.589471 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.589661 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:34Z","lastTransitionTime":"2025-12-03T08:40:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.676951 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.676952 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:40:34 crc kubenswrapper[4576]: E1203 08:40:34.677327 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:40:34 crc kubenswrapper[4576]: E1203 08:40:34.677642 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.693203 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.693257 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.693269 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.693326 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.693340 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:34Z","lastTransitionTime":"2025-12-03T08:40:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.854418 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.854521 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.854553 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.854581 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.854601 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:34Z","lastTransitionTime":"2025-12-03T08:40:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.958436 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.958504 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.958519 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.958575 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:34 crc kubenswrapper[4576]: I1203 08:40:34.958595 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:34Z","lastTransitionTime":"2025-12-03T08:40:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.061234 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.061289 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.061302 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.061319 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.061330 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:35Z","lastTransitionTime":"2025-12-03T08:40:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.165752 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.166051 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.166268 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.166495 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.166740 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:35Z","lastTransitionTime":"2025-12-03T08:40:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.269315 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.269400 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.269420 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.269445 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.269461 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:35Z","lastTransitionTime":"2025-12-03T08:40:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.372431 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.372489 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.372512 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.372584 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.372611 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:35Z","lastTransitionTime":"2025-12-03T08:40:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.454796 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c87be72e-a53b-42c9-bb32-f56cd0febe24-metrics-certs\") pod \"network-metrics-daemon-pw7pk\" (UID: \"c87be72e-a53b-42c9-bb32-f56cd0febe24\") " pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:40:35 crc kubenswrapper[4576]: E1203 08:40:35.455503 4576 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 08:40:35 crc kubenswrapper[4576]: E1203 08:40:35.456160 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c87be72e-a53b-42c9-bb32-f56cd0febe24-metrics-certs podName:c87be72e-a53b-42c9-bb32-f56cd0febe24 nodeName:}" failed. No retries permitted until 2025-12-03 08:40:39.45606212 +0000 UTC m=+46.842039174 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c87be72e-a53b-42c9-bb32-f56cd0febe24-metrics-certs") pod "network-metrics-daemon-pw7pk" (UID: "c87be72e-a53b-42c9-bb32-f56cd0febe24") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.475711 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.475744 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.475753 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.475765 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.475774 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:35Z","lastTransitionTime":"2025-12-03T08:40:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.579172 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.579212 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.579224 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.579242 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.579254 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:35Z","lastTransitionTime":"2025-12-03T08:40:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.677086 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.677149 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:40:35 crc kubenswrapper[4576]: E1203 08:40:35.677908 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:40:35 crc kubenswrapper[4576]: E1203 08:40:35.680070 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.687570 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.687755 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.687776 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.687811 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.687828 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:35Z","lastTransitionTime":"2025-12-03T08:40:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.791735 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.792031 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.792116 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.792186 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.792253 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:35Z","lastTransitionTime":"2025-12-03T08:40:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.901950 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.902052 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.902081 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.902117 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:35 crc kubenswrapper[4576]: I1203 08:40:35.902145 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:35Z","lastTransitionTime":"2025-12-03T08:40:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.005140 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.005208 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.005226 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.005256 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.005274 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:36Z","lastTransitionTime":"2025-12-03T08:40:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.109148 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.109213 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.109230 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.109253 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.109267 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:36Z","lastTransitionTime":"2025-12-03T08:40:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.211592 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.211659 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.211679 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.211703 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.211719 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:36Z","lastTransitionTime":"2025-12-03T08:40:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.315165 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.315213 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.315223 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.315241 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.315256 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:36Z","lastTransitionTime":"2025-12-03T08:40:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.419097 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.419150 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.419163 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.419178 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.419187 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:36Z","lastTransitionTime":"2025-12-03T08:40:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.523124 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.523222 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.523244 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.523270 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.523290 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:36Z","lastTransitionTime":"2025-12-03T08:40:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.626347 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.626396 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.626410 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.626428 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.626440 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:36Z","lastTransitionTime":"2025-12-03T08:40:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.676698 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.676768 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:40:36 crc kubenswrapper[4576]: E1203 08:40:36.676895 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:40:36 crc kubenswrapper[4576]: E1203 08:40:36.676994 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.729882 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.729944 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.729956 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.729972 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.729983 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:36Z","lastTransitionTime":"2025-12-03T08:40:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.831907 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.831960 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.831971 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.831985 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.831994 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:36Z","lastTransitionTime":"2025-12-03T08:40:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.935120 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.935160 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.935174 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.935189 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:36 crc kubenswrapper[4576]: I1203 08:40:36.935198 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:36Z","lastTransitionTime":"2025-12-03T08:40:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.038219 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.038300 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.038314 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.038333 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.038351 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:37Z","lastTransitionTime":"2025-12-03T08:40:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.140885 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.140914 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.140925 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.140949 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.140960 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:37Z","lastTransitionTime":"2025-12-03T08:40:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.243753 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.243800 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.243842 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.243862 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.243880 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:37Z","lastTransitionTime":"2025-12-03T08:40:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.346719 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.346783 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.346836 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.346866 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.346885 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:37Z","lastTransitionTime":"2025-12-03T08:40:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.451780 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.451902 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.451971 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.452005 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.452068 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:37Z","lastTransitionTime":"2025-12-03T08:40:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.556076 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.556132 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.556153 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.556173 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.556189 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:37Z","lastTransitionTime":"2025-12-03T08:40:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.671092 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.671155 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.671168 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.671186 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.671200 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:37Z","lastTransitionTime":"2025-12-03T08:40:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.676578 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.676655 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:40:37 crc kubenswrapper[4576]: E1203 08:40:37.676757 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:40:37 crc kubenswrapper[4576]: E1203 08:40:37.676913 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.773949 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.773987 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.773996 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.774009 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.774018 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:37Z","lastTransitionTime":"2025-12-03T08:40:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.879013 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.879121 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.879151 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.879177 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.879190 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:37Z","lastTransitionTime":"2025-12-03T08:40:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.982519 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.982627 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.982649 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.982679 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:37 crc kubenswrapper[4576]: I1203 08:40:37.982706 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:37Z","lastTransitionTime":"2025-12-03T08:40:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.085552 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.085592 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.085602 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.085617 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.085627 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:38Z","lastTransitionTime":"2025-12-03T08:40:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.188515 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.188586 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.188602 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.188622 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.188636 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:38Z","lastTransitionTime":"2025-12-03T08:40:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.290990 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.291025 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.291037 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.291053 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.291066 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:38Z","lastTransitionTime":"2025-12-03T08:40:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.392888 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.392970 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.392986 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.393004 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.393016 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:38Z","lastTransitionTime":"2025-12-03T08:40:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.495450 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.495508 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.495558 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.495587 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.495603 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:38Z","lastTransitionTime":"2025-12-03T08:40:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.598278 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.598312 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.598321 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.598334 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.598343 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:38Z","lastTransitionTime":"2025-12-03T08:40:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.676941 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.676968 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:40:38 crc kubenswrapper[4576]: E1203 08:40:38.677132 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:40:38 crc kubenswrapper[4576]: E1203 08:40:38.677224 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.701259 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.701342 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.701365 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.701415 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.701442 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:38Z","lastTransitionTime":"2025-12-03T08:40:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.804358 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.804403 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.804414 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.804432 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.804446 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:38Z","lastTransitionTime":"2025-12-03T08:40:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.907135 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.907175 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.907189 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.907207 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:38 crc kubenswrapper[4576]: I1203 08:40:38.907221 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:38Z","lastTransitionTime":"2025-12-03T08:40:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.010368 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.010419 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.010431 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.010448 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.010459 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:39Z","lastTransitionTime":"2025-12-03T08:40:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.112685 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.112747 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.112760 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.112776 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.112788 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:39Z","lastTransitionTime":"2025-12-03T08:40:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.167165 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.169137 4576 scope.go:117] "RemoveContainer" containerID="ada85ef4466c7ef967979027602ff555ef3717bd290edc5341cd81fd11f65bf2" Dec 03 08:40:39 crc kubenswrapper[4576]: E1203 08:40:39.169850 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-5f9zh_openshift-ovn-kubernetes(cfe34f07-1425-4b62-9eb0-70d1b197611c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.193622 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"
name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is 
complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:39Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.215620 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:39Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.216392 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.216652 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.216810 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.216956 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.217092 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:39Z","lastTransitionTime":"2025-12-03T08:40:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.232132 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7487354b977692f3cf1a8c2c9ede2bafac976404219c033a5c58f7695f039aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:39Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.246612 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:39Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.262750 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72bb738c-5ba1-4104-8729-1a929fa6d2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06ad4110fcb33e7b3cd2221f15d0b056660f9111f5ac3fd5f2633d4269ee553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"container
ID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true
,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q8kww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:39Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.275195 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6qf5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a93555e6-d807-4db3-85c0-843f309e6efa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69aea80b1478f26af6f4835e1599afdb41d2a619ca44bfbdd35b1cdf664a7a2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mplbz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-6qf5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:39Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.295067 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe34f07-1425-4b62-9eb0-70d1b197611c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics
-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://106766323b05884834bffc21cd45b77320e1bf62862139be3e9cfed8b004275e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:18Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ 
nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ada85ef4466c7ef967979027602ff555ef3717bd290edc5341cd81fd11f65bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ada85ef4466c7ef967979027602ff555ef3717bd290edc5341cd81fd11f65bf2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:29Z\\\",\\\"message\\\":\\\"mns:[] Mutations:[{Column:policies Mutator:insert Value:{GoSet:[{GoUUID:a5a72d02-1a0f-4f7f-a8c5-6923a1c4274a}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f6d604c1-9711-4e25-be6c-79ec28bbad1b}] Until: Durable:\\\\u003cnil\\\\u003e 
Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 08:40:29.877056 5919 address_set.go:302] New(0d39bc5c-d5b9-432c-81be-2275bce5d7aa/default-network-controller:EgressIP:node-ips:v4:default/a712973235162149816) with []\\\\nI1203 08:40:29.877090 5919 address_set.go:302] New(aa6fc2dc-fab0-4812-b9da-809058e4dcf7/default-network-controller:EgressIP:egressip-served-pods:v4:default/a8519615025667110816) with []\\\\nI1203 08:40:29.877104 5919 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI1203 08:40:29.877141 5919 factory.go:1336] Added *v1.Node event handler 7\\\\nI1203 08:40:29.877160 5919 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1203 08:40:29.877419 5919 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1203 08:40:29.877514 5919 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1203 08:40:29.877552 5919 ovnkube.go:599] Stopped ovnkube\\\\nI1203 08:40:29.877569 5919 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1203 08:40:29.877621 5919 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:28Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-5f9zh_openshift-ovn-kubernetes(cfe34f07-1425-4b62-9eb0-70d1b197611c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5f9zh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:39Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.310630 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xrjlb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc47ce4e85ba7755ceabf5b864579aee5ea67ffcc13719ac1068f6c46a5a7d3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlwv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xrjlb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:39Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.320201 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.320248 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.320261 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.320285 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.320319 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:39Z","lastTransitionTime":"2025-12-03T08:40:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.321352 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pw7pk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c87be72e-a53b-42c9-bb32-f56cd0febe24\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvsfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvsfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:31Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pw7pk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:39Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.338290 4576 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:39Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.349356 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff480bdd6b31fa53fe906aaa2459ccce17b507c4789f4ab6b64a8d2f286c9b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:39Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.365104 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jbxx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87ab08c02e31fb934248fab8c55b94006e54110d6176cd571dbefd774980d9aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9gm6g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jbxx2\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:39Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.380637 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60b1bede-26e9-4b5d-b450-9866da685693\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c6e7072fbdbb1574e02549ccd40206d728b36405b4c49b87e7f9fe11a8e0a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77fcbab4f8441fdeef4ff6f84326c1c5ab799faac0698120b8af9da36a524290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-pjb2d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:39Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.400555 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r47b6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba9d9c6-a657-4ac0-99e4-5ec5babb64ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d5b6a2b0818d8e906110415190bed49348e53d7d987987ce0aa699a7d46378b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bfkfz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://502bb671eef9e8e245ba86484a31e8009e90df1c55cc320e8b7bcf227cdd9ae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bfkfz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-r47b6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:39Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.423135 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.423185 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.423200 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.423221 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.423236 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:39Z","lastTransitionTime":"2025-12-03T08:40:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.424689 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9942901-4be1-41db-b32c-9b996b72901f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88d9b58084ebfe06075e53fe77aec63e6c4e338aec6e1f31998d4bfcaca7ba77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac6d69cbfb86f4b717a66b46125d5d530049473a1f8b21e3be0d85c37ba5b837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3dc0695bda2cc75c2ccf82050ca47b54244a19319c741a2368e448f635ee3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fdc3c90dc45378285ef9d57dceea95152497f0
aefe12991a47354d11a0c838b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b49f6c1bad412759cfa18b56387ca168d26a69640f1c15c1b82040b73531bf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:39Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.437674 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90118b09-8d4e-4eb5-831f-56e13fa31009\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290805e286eda8aff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:39Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.453313 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3038dcea3c147a1d523c7cd5c6ec049a499b39dda4de06d42324e6e8998334f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52333a05f039ec84c87b0170524958d01fbb7688666f98dd54700a6398ce3683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:39Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.508577 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c87be72e-a53b-42c9-bb32-f56cd0febe24-metrics-certs\") pod \"network-metrics-daemon-pw7pk\" (UID: \"c87be72e-a53b-42c9-bb32-f56cd0febe24\") " pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:40:39 crc kubenswrapper[4576]: E1203 08:40:39.508784 4576 secret.go:188] 
Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 08:40:39 crc kubenswrapper[4576]: E1203 08:40:39.508881 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c87be72e-a53b-42c9-bb32-f56cd0febe24-metrics-certs podName:c87be72e-a53b-42c9-bb32-f56cd0febe24 nodeName:}" failed. No retries permitted until 2025-12-03 08:40:47.508849546 +0000 UTC m=+54.894826550 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c87be72e-a53b-42c9-bb32-f56cd0febe24-metrics-certs") pod "network-metrics-daemon-pw7pk" (UID: "c87be72e-a53b-42c9-bb32-f56cd0febe24") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.526199 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.526275 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.526290 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.526337 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.526350 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:39Z","lastTransitionTime":"2025-12-03T08:40:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.629149 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.630085 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.630285 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.630444 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.630650 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:39Z","lastTransitionTime":"2025-12-03T08:40:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.677274 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.677350 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:40:39 crc kubenswrapper[4576]: E1203 08:40:39.677461 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:40:39 crc kubenswrapper[4576]: E1203 08:40:39.677635 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.734572 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.734669 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.734681 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.734700 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.734712 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:39Z","lastTransitionTime":"2025-12-03T08:40:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.837947 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.838281 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.838473 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.838697 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.838961 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:39Z","lastTransitionTime":"2025-12-03T08:40:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.942144 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.942184 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.942201 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.942229 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:39 crc kubenswrapper[4576]: I1203 08:40:39.942245 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:39Z","lastTransitionTime":"2025-12-03T08:40:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.044882 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.044935 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.044951 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.045250 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.045279 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:40Z","lastTransitionTime":"2025-12-03T08:40:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.148742 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.148784 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.148796 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.148814 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.148827 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:40Z","lastTransitionTime":"2025-12-03T08:40:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.259802 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.259835 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.259846 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.259864 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.259876 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:40Z","lastTransitionTime":"2025-12-03T08:40:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.363466 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.363599 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.363615 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.363635 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.363649 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:40Z","lastTransitionTime":"2025-12-03T08:40:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.466212 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.466297 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.466315 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.466367 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.466385 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:40Z","lastTransitionTime":"2025-12-03T08:40:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.569057 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.569104 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.569119 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.569137 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.569148 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:40Z","lastTransitionTime":"2025-12-03T08:40:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.671825 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.671898 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.671951 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.671983 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.672004 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:40Z","lastTransitionTime":"2025-12-03T08:40:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.676345 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.676396 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:40:40 crc kubenswrapper[4576]: E1203 08:40:40.676454 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:40:40 crc kubenswrapper[4576]: E1203 08:40:40.676566 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.775106 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.775205 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.775224 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.775280 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.775301 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:40Z","lastTransitionTime":"2025-12-03T08:40:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.878670 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.878754 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.878773 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.878798 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.878819 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:40Z","lastTransitionTime":"2025-12-03T08:40:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.982043 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.982115 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.982133 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.982166 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:40 crc kubenswrapper[4576]: I1203 08:40:40.982185 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:40Z","lastTransitionTime":"2025-12-03T08:40:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.085383 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.085452 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.085464 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.085503 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.085517 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:41Z","lastTransitionTime":"2025-12-03T08:40:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.128643 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.128690 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.128701 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.128719 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.128733 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:41Z","lastTransitionTime":"2025-12-03T08:40:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:41 crc kubenswrapper[4576]: E1203 08:40:41.145891 4576 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148060Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608860Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"86dcb870-937a-4231-a14c-053b8b425329\\\",\\\"systemUUID\\\":\\\"da5fb8e3-5a19-4f2d-831e-00b6f563dbea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:41Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.150294 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.150926 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.150976 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.150989 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.151008 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.151019 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:41Z","lastTransitionTime":"2025-12-03T08:40:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.161416 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 03 08:40:41 crc kubenswrapper[4576]: E1203 08:40:41.166394 4576 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148060Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608860Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"86dcb870-937a-4231-a14c-053b8b425329\\\",\\\"systemUUID\\\":\\\"da5fb8e3-5a19-4f2d-831e-00b6f563dbea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:41Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.169516 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jbxx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87ab08c02e31fb934248fab8c55b94006e54110d6176cd571dbefd774980d9aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9gm6g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jbxx2\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:41Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.172519 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.172645 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.172664 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.172731 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.172754 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:41Z","lastTransitionTime":"2025-12-03T08:40:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.186858 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60b1bede-26e9-4b5d-b450-9866da685693\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c6e7072fbdbb1574e02549ccd40206d728b36405b4c49b87e7f9fe11a8e0a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77fcbab4f8441fdeef4ff6f84326c1c5ab799faac0698120b8af9da36a524290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pjb2d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:41Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:41 crc kubenswrapper[4576]: E1203 08:40:41.191064 4576 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148060Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608860Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"86dcb870-937a-4231-a14c-053b8b425329\\\",\\\"systemUUID\\\":\\\"da5fb8e3-5a19-4f2d-831e-00b6f563dbea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:41Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.195901 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.195959 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.195980 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.196005 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.196023 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:41Z","lastTransitionTime":"2025-12-03T08:40:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.212936 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe34f07-1425-4b62-9eb0-70d1b197611c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://106766323b05884834bffc21cd45b77320e1bf62862139be3e9cfed8b004275e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:18Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ada85ef4466c7ef967979027602ff555ef3717bd290edc5341cd81fd11f65bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ada85ef4466c7ef967979027602ff555ef3717bd290edc5341cd81fd11f65bf2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:29Z\\\",\\\"message\\\":\\\"mns:[] Mutations:[{Column:policies Mutator:insert Value:{GoSet:[{GoUUID:a5a72d02-1a0f-4f7f-a8c5-6923a1c4274a}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f6d604c1-9711-4e25-be6c-79ec28bbad1b}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 08:40:29.877056 5919 address_set.go:302] New(0d39bc5c-d5b9-432c-81be-2275bce5d7aa/default-network-controller:EgressIP:node-ips:v4:default/a712973235162149816) with []\\\\nI1203 08:40:29.877090 5919 address_set.go:302] New(aa6fc2dc-fab0-4812-b9da-809058e4dcf7/default-network-controller:EgressIP:egressip-served-pods:v4:default/a8519615025667110816) with []\\\\nI1203 08:40:29.877104 5919 address_set.go:302] 
New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI1203 08:40:29.877141 5919 factory.go:1336] Added *v1.Node event handler 7\\\\nI1203 08:40:29.877160 5919 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1203 08:40:29.877419 5919 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1203 08:40:29.877514 5919 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1203 08:40:29.877552 5919 ovnkube.go:599] Stopped ovnkube\\\\nI1203 08:40:29.877569 5919 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1203 08:40:29.877621 5919 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:28Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-5f9zh_openshift-ovn-kubernetes(cfe34f07-1425-4b62-9eb0-70d1b197611c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5f9zh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:41Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:41 crc kubenswrapper[4576]: E1203 08:40:41.216334 4576 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148060Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608860Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"message\\\":\\\"kubelet has no 
disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"si
zeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"86dcb870-937a-4231-a14c-053b8b425329\\\",\\\"systemUUID\\\":
\\\"da5fb8e3-5a19-4f2d-831e-00b6f563dbea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:41Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.221726 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.221765 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.221776 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.221820 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.221836 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:41Z","lastTransitionTime":"2025-12-03T08:40:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.226597 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xrjlb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc47ce4e85ba7755ceabf5b864579aee5ea67ffcc13719ac1068f6c46a5a7d3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\"
,\\\"name\\\":\\\"kube-api-access-wlwv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xrjlb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:41Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:41 crc kubenswrapper[4576]: E1203 08:40:41.237627 4576 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148060Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608860Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"86dcb870-937a-4231-a14c-053b8b425329\\\",\\\"systemUUID\\\":\\\"da5fb8e3-5a19-4f2d-831e-00b6f563dbea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:41Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:41 crc kubenswrapper[4576]: E1203 08:40:41.237905 4576 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.239366 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.239388 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.239398 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.239417 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.239429 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:41Z","lastTransitionTime":"2025-12-03T08:40:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.239479 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pw7pk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c87be72e-a53b-42c9-bb32-f56cd0febe24\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvsfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvsfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:31Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pw7pk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:41Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.251319 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:41Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.264142 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff480bdd6b31fa53fe906aaa2459ccce17b507c4789f4ab6b64a8d2f286c9b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:41Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.275475 4576 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90118b09-8d4e-4eb5-831f-56e13fa31009\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290
805e286eda8aff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:41Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.286970 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3038dcea3c147a1d523c7cd5c6ec049a499b39dda4de06d42324e6e8998334f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52333a05f039ec84c87b0170524958d01fbb7688666f98dd54700a6398ce3683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\
\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:41Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.297484 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r47b6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba9d9c6-a657-4ac0-99e4-5ec5babb64ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d5b6a2b0818d8e906110415190bed49348e53d7d987987ce0aa699a7d46378b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bfkfz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://502bb671eef9e8e245ba86484a31e8009e90df1c55cc320e8b7bcf227cdd9ae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bfkfz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-r47b6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:41Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.316143 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9942901-4be1-41db-b32c-9b996b72901f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88d9b58084ebfe06075e53fe77aec63e6c4e338aec6e1f31998d4bfcaca7ba77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac6d69cbfb86f4b717a66b46125d5d530049473a1f8b21e3be0d85c37ba5b837\\\",\\\"image\\\":\\\"quay.io/
openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3dc0695bda2cc75c2ccf82050ca47b54244a19319c741a2368e448f635ee3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fdc3c90dc45378285ef9d57dceea95152497f0aefe12991a47354d11a0c838b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b49f6c1bad412759cfa18b56387ca168d26a69640f1c15c1b82040b73531bf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a
5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:41Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.327136 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:41Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.339967 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:41Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.341202 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.341238 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.341249 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.341266 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.341277 4576 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:41Z","lastTransitionTime":"2025-12-03T08:40:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.355970 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72bb738c-5ba1-4104-8729-1a929fa6d2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06ad4110fcb33e7b3cd2221f15d0b056660f9111f5ac3fd5f2633d4269ee553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",
\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"co
ntainerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q8kww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:41Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.366055 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6qf5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a93555e6-d807-4db3-85c0-843f309e6efa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69aea80b1478f26af6f4835e1599afdb41d2a619ca44bfbdd35b1cdf664a7a2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mplbz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6qf5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:41Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.383427 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7487354b977692f3cf1a8c2c9ede2bafac976404219c033a5c58f7695f039aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:41Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.396507 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:41Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.444027 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.444081 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.444097 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.444119 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.444132 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:41Z","lastTransitionTime":"2025-12-03T08:40:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.547177 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.547254 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.547275 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.547296 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.547341 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:41Z","lastTransitionTime":"2025-12-03T08:40:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.650356 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.650408 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.650420 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.650439 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.650453 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:41Z","lastTransitionTime":"2025-12-03T08:40:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.676852 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:40:41 crc kubenswrapper[4576]: E1203 08:40:41.677002 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.677133 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:40:41 crc kubenswrapper[4576]: E1203 08:40:41.677701 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.752983 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.753031 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.753044 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.753061 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.753075 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:41Z","lastTransitionTime":"2025-12-03T08:40:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.855823 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.855872 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.855884 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.855902 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.855914 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:41Z","lastTransitionTime":"2025-12-03T08:40:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.959088 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.959143 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.959156 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.959174 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:41 crc kubenswrapper[4576]: I1203 08:40:41.959186 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:41Z","lastTransitionTime":"2025-12-03T08:40:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.062257 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.062307 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.062319 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.062336 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.062349 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:42Z","lastTransitionTime":"2025-12-03T08:40:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.165662 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.165714 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.165728 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.165747 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.165758 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:42Z","lastTransitionTime":"2025-12-03T08:40:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.270030 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.270114 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.270132 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.270159 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.270177 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:42Z","lastTransitionTime":"2025-12-03T08:40:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.373221 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.373273 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.373287 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.373300 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.373310 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:42Z","lastTransitionTime":"2025-12-03T08:40:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.476352 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.476390 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.476403 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.476420 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.476430 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:42Z","lastTransitionTime":"2025-12-03T08:40:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.579393 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.579443 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.579457 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.579473 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.579485 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:42Z","lastTransitionTime":"2025-12-03T08:40:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.676494 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.676583 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:40:42 crc kubenswrapper[4576]: E1203 08:40:42.676724 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:40:42 crc kubenswrapper[4576]: E1203 08:40:42.676830 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.683075 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.683140 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.683158 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.683181 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.683200 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:42Z","lastTransitionTime":"2025-12-03T08:40:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.785707 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.785754 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.785763 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.785778 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.785787 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:42Z","lastTransitionTime":"2025-12-03T08:40:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.889007 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.889068 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.889084 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.889109 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.889123 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:42Z","lastTransitionTime":"2025-12-03T08:40:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.992066 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.992114 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.992124 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.992140 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:42 crc kubenswrapper[4576]: I1203 08:40:42.992150 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:42Z","lastTransitionTime":"2025-12-03T08:40:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.095748 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.095793 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.095804 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.095821 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.095834 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:43Z","lastTransitionTime":"2025-12-03T08:40:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.199246 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.199321 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.199339 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.199361 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.199377 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:43Z","lastTransitionTime":"2025-12-03T08:40:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.302178 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.302269 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.302281 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.302325 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.302340 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:43Z","lastTransitionTime":"2025-12-03T08:40:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.405307 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.405360 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.405372 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.405391 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.405399 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:43Z","lastTransitionTime":"2025-12-03T08:40:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.508591 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.508632 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.508644 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.508658 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.508667 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:43Z","lastTransitionTime":"2025-12-03T08:40:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.613458 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.614056 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.614243 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.614405 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.614565 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:43Z","lastTransitionTime":"2025-12-03T08:40:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.676487 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:40:43 crc kubenswrapper[4576]: E1203 08:40:43.676791 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.676515 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:40:43 crc kubenswrapper[4576]: E1203 08:40:43.677250 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.692927 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7487354b977692f3cf1a8c2c9ede2bafac976404219c033a5c58f7695f039aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:43Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.705780 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:43Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.717755 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.717966 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.718038 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.718103 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.718178 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:43Z","lastTransitionTime":"2025-12-03T08:40:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.722791 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72bb738c-5ba1-4104-8729-1a929fa6d2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06ad4110fcb33e7b3cd2221f15d0b056660f9111f5ac3fd5f2633d4269ee553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q8kww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:43Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.734122 4576 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-dns/node-resolver-6qf5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a93555e6-d807-4db3-85c0-843f309e6efa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69aea80b1478f26af6f4835e1599afdb41d2a619ca44bfbdd35b1cdf664a7a2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mplbz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6qf5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:43Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.744704 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xrjlb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc47ce4e85ba7755ceabf5b864579aee5ea67ffcc13719ac1068f6c46a5a7d3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlwv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xrjlb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:43Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.757794 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pw7pk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c87be72e-a53b-42c9-bb32-f56cd0febe24\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvsfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvsfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:31Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pw7pk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:43Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.773429 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:43Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.785233 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff480bdd6b31fa53fe906aaa2459ccce17b507c4789f4ab6b64a8d2f286c9b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:43Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.800066 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jbxx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87ab08c02e31fb934248fab8c55b94006e54110d6176cd571dbefd774980d9aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/ser
viceaccount\\\",\\\"name\\\":\\\"kube-api-access-9gm6g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jbxx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:43Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.811072 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60b1bede-26e9-4b5d-b450-9866da685693\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c6e7072fbdbb1574e02549ccd40206d728b36405b4c49b87e7f9fe11a8e0a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77fcbab4f8441fdeef4ff6f84326c1c5ab799faac0698120b8af9da36a524290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\
\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pjb2d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:43Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.820563 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.820599 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.820610 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.820626 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.820638 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:43Z","lastTransitionTime":"2025-12-03T08:40:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.829277 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe34f07-1425-4b62-9eb0-70d1b197611c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://106766323b05884834bffc21cd45b77320e1bf62862139be3e9cfed8b004275e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:18Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ 
local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ada85ef4466c7ef967979027602ff555ef3717bd290edc5341cd81fd11f65bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ada85ef4466c7ef967979027602ff555ef3717bd290edc5341cd81fd11f65bf2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:29Z\\\",\\\"message\\\":\\\"mns:[] Mutations:[{Column:policies Mutator:insert Value:{GoSet:[{GoUUID:a5a72d02-1a0f-4f7f-a8c5-6923a1c4274a}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f6d604c1-9711-4e25-be6c-79ec28bbad1b}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 08:40:29.877056 5919 address_set.go:302] New(0d39bc5c-d5b9-432c-81be-2275bce5d7aa/default-network-controller:EgressIP:node-ips:v4:default/a712973235162149816) with []\\\\nI1203 08:40:29.877090 5919 address_set.go:302] 
New(aa6fc2dc-fab0-4812-b9da-809058e4dcf7/default-network-controller:EgressIP:egressip-served-pods:v4:default/a8519615025667110816) with []\\\\nI1203 08:40:29.877104 5919 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI1203 08:40:29.877141 5919 factory.go:1336] Added *v1.Node event handler 7\\\\nI1203 08:40:29.877160 5919 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1203 08:40:29.877419 5919 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1203 08:40:29.877514 5919 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1203 08:40:29.877552 5919 ovnkube.go:599] Stopped ovnkube\\\\nI1203 08:40:29.877569 5919 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1203 08:40:29.877621 5919 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:28Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-5f9zh_openshift-ovn-kubernetes(cfe34f07-1425-4b62-9eb0-70d1b197611c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a
2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5f9zh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:43Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.848171 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9942901-4be1-41db-b32c-9b996b72901f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88d9b58084ebfe06075e53fe77aec63e6c4e338aec6e1f31998d4bfcaca7ba77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac6d69cbfb86f4b717a66b46125d5d530049473a1f8b21e3be0d85c37ba5b837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3dc0695bda2cc75c2ccf82050ca47b54244a19319c741a2368e448f635ee3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fdc3c90dc45378285ef9d57dceea95152497f0
aefe12991a47354d11a0c838b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b49f6c1bad412759cfa18b56387ca168d26a69640f1c15c1b82040b73531bf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:43Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.861745 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90118b09-8d4e-4eb5-831f-56e13fa31009\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290805e286eda8aff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:43Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.874478 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3038dcea3c147a1d523c7cd5c6ec049a499b39dda4de06d42324e6e8998334f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52333a05f039ec84c87b0170524958d01fbb7688666f98dd54700a6398ce3683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:43Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.891702 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r47b6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba9d9c6-a657-4ac0-99e4-5ec5babb64ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d5b6a2b0818d8e906110415190bed49348e53d7d987987ce0aa699a7d46378b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bfkfz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://502bb671eef9e8e245ba86484a31e8009e90df1c55cc320e8b7bcf227cdd9ae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bfkfz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-r47b6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:43Z is after 2025-08-24T17:21:41Z" Dec 03 
08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.907411 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:43Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.922566 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.922607 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.922620 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.922636 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.922647 4576 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:43Z","lastTransitionTime":"2025-12-03T08:40:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.922727 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"265e9cfe-6976-462e-b87f-9699e3a4c902\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89498b02a40a022c6a40707b5c5e4fa9c94e57d4c7a78ab9687c3e55b3860c26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf315b6a16cc730b3f5b6c98397a15b8542f4ea7812285c4f9aee06bab125942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f74d1ddba25f70ad4e5966802bf04babfc9035636f70bae24c2b97ebacb4bda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controlle
r\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4992dfdb73bd089a847a14022b609cad34fef18469532862b7b943400c22794d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4992dfdb73bd089a847a14022b609cad34fef18469532862b7b943400c22794d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:43Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:43 crc kubenswrapper[4576]: I1203 08:40:43.939516 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:43Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.025483 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.025595 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.025616 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.025641 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.025660 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:44Z","lastTransitionTime":"2025-12-03T08:40:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.128477 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.128541 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.128554 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.128570 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.128581 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:44Z","lastTransitionTime":"2025-12-03T08:40:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.231142 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.231217 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.231243 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.231274 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.231302 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:44Z","lastTransitionTime":"2025-12-03T08:40:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.338607 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.338668 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.338697 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.338738 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.338760 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:44Z","lastTransitionTime":"2025-12-03T08:40:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.441162 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.441206 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.441218 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.441233 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.441245 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:44Z","lastTransitionTime":"2025-12-03T08:40:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.543310 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.543359 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.543369 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.543384 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.543394 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:44Z","lastTransitionTime":"2025-12-03T08:40:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.646021 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.646067 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.646078 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.646094 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.646105 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:44Z","lastTransitionTime":"2025-12-03T08:40:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.676602 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:40:44 crc kubenswrapper[4576]: E1203 08:40:44.676771 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.677163 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:40:44 crc kubenswrapper[4576]: E1203 08:40:44.677390 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.748642 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.748867 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.748930 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.748999 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.749066 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:44Z","lastTransitionTime":"2025-12-03T08:40:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.853099 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.853174 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.853194 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.853220 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.853239 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:44Z","lastTransitionTime":"2025-12-03T08:40:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.956655 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.957152 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.957352 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.957593 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:44 crc kubenswrapper[4576]: I1203 08:40:44.957802 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:44Z","lastTransitionTime":"2025-12-03T08:40:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.060188 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.060232 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.060242 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.060278 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.060288 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:45Z","lastTransitionTime":"2025-12-03T08:40:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.162840 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.163168 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.163287 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.163449 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.163617 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:45Z","lastTransitionTime":"2025-12-03T08:40:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.267290 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.267703 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.267879 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.268009 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.268144 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:45Z","lastTransitionTime":"2025-12-03T08:40:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.370986 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.371029 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.371043 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.371060 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.371071 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:45Z","lastTransitionTime":"2025-12-03T08:40:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.474245 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.474751 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.474981 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.475309 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.475552 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:45Z","lastTransitionTime":"2025-12-03T08:40:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.579244 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.580144 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.580452 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.580726 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.581003 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:45Z","lastTransitionTime":"2025-12-03T08:40:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.676735 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.676811 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:40:45 crc kubenswrapper[4576]: E1203 08:40:45.677251 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:40:45 crc kubenswrapper[4576]: E1203 08:40:45.677518 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.682797 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.682865 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.682883 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.682937 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.682955 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:45Z","lastTransitionTime":"2025-12-03T08:40:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.785811 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.786095 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.786179 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.786261 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.786341 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:45Z","lastTransitionTime":"2025-12-03T08:40:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.889681 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.889721 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.889731 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.889745 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.889754 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:45Z","lastTransitionTime":"2025-12-03T08:40:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.992322 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.992362 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.992372 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.992388 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:45 crc kubenswrapper[4576]: I1203 08:40:45.992397 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:45Z","lastTransitionTime":"2025-12-03T08:40:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.095345 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.095391 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.095403 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.095420 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.095432 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:46Z","lastTransitionTime":"2025-12-03T08:40:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.198320 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.198363 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.198376 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.198391 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.198400 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:46Z","lastTransitionTime":"2025-12-03T08:40:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.302636 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.302911 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.303023 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.303093 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.303149 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:46Z","lastTransitionTime":"2025-12-03T08:40:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.405055 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.405312 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.405375 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.405451 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.405511 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:46Z","lastTransitionTime":"2025-12-03T08:40:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.508762 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.508858 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.508878 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.508902 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.508923 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:46Z","lastTransitionTime":"2025-12-03T08:40:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.618416 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.618504 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.618565 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.618599 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.618626 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:46Z","lastTransitionTime":"2025-12-03T08:40:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.676881 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.676946 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:40:46 crc kubenswrapper[4576]: E1203 08:40:46.677086 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:40:46 crc kubenswrapper[4576]: E1203 08:40:46.677287 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.721072 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.721100 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.721107 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.721136 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.721145 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:46Z","lastTransitionTime":"2025-12-03T08:40:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.796639 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.796759 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.796818 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.796859 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.796894 4576 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:40:46 crc kubenswrapper[4576]: E1203 08:40:46.797027 4576 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 08:40:46 crc kubenswrapper[4576]: E1203 08:40:46.797106 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 08:41:18.797084461 +0000 UTC m=+86.183061455 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 08:40:46 crc kubenswrapper[4576]: E1203 08:40:46.797251 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:41:18.797238935 +0000 UTC m=+86.183215929 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:40:46 crc kubenswrapper[4576]: E1203 08:40:46.797365 4576 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 08:40:46 crc kubenswrapper[4576]: E1203 08:40:46.797401 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 08:41:18.797391808 +0000 UTC m=+86.183368802 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 08:40:46 crc kubenswrapper[4576]: E1203 08:40:46.797491 4576 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 08:40:46 crc kubenswrapper[4576]: E1203 08:40:46.797516 4576 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 08:40:46 crc kubenswrapper[4576]: E1203 08:40:46.797581 4576 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 08:40:46 crc kubenswrapper[4576]: E1203 08:40:46.797624 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 08:41:18.797610984 +0000 UTC m=+86.183587978 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 08:40:46 crc kubenswrapper[4576]: E1203 08:40:46.797698 4576 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 08:40:46 crc kubenswrapper[4576]: E1203 08:40:46.797711 4576 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 08:40:46 crc kubenswrapper[4576]: E1203 08:40:46.797721 4576 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 08:40:46 crc kubenswrapper[4576]: E1203 08:40:46.797745 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 08:41:18.797737767 +0000 UTC m=+86.183714761 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.828066 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.828339 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.828467 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.828494 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.828517 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:46Z","lastTransitionTime":"2025-12-03T08:40:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.931841 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.931940 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.931965 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.932020 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:46 crc kubenswrapper[4576]: I1203 08:40:46.932040 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:46Z","lastTransitionTime":"2025-12-03T08:40:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.034753 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.034790 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.034802 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.034819 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.034830 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:47Z","lastTransitionTime":"2025-12-03T08:40:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.137478 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.137553 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.137568 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.137586 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.137601 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:47Z","lastTransitionTime":"2025-12-03T08:40:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.240077 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.240119 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.240131 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.240148 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.240160 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:47Z","lastTransitionTime":"2025-12-03T08:40:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.342900 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.342932 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.342942 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.342957 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.342966 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:47Z","lastTransitionTime":"2025-12-03T08:40:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.445409 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.445444 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.445455 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.445470 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.445481 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:47Z","lastTransitionTime":"2025-12-03T08:40:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.549461 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.549566 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.549593 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.549623 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.549645 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:47Z","lastTransitionTime":"2025-12-03T08:40:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.606431 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c87be72e-a53b-42c9-bb32-f56cd0febe24-metrics-certs\") pod \"network-metrics-daemon-pw7pk\" (UID: \"c87be72e-a53b-42c9-bb32-f56cd0febe24\") " pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:40:47 crc kubenswrapper[4576]: E1203 08:40:47.606726 4576 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 08:40:47 crc kubenswrapper[4576]: E1203 08:40:47.606868 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c87be72e-a53b-42c9-bb32-f56cd0febe24-metrics-certs podName:c87be72e-a53b-42c9-bb32-f56cd0febe24 nodeName:}" failed. No retries permitted until 2025-12-03 08:41:03.606838507 +0000 UTC m=+70.992815521 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c87be72e-a53b-42c9-bb32-f56cd0febe24-metrics-certs") pod "network-metrics-daemon-pw7pk" (UID: "c87be72e-a53b-42c9-bb32-f56cd0febe24") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.652392 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.652439 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.652452 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.652468 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.652480 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:47Z","lastTransitionTime":"2025-12-03T08:40:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.676222 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.676258 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:40:47 crc kubenswrapper[4576]: E1203 08:40:47.676468 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:40:47 crc kubenswrapper[4576]: E1203 08:40:47.676511 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.754735 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.754800 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.754811 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.754830 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.754841 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:47Z","lastTransitionTime":"2025-12-03T08:40:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.856904 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.856953 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.856970 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.856989 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.857004 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:47Z","lastTransitionTime":"2025-12-03T08:40:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.959283 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.959326 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.959334 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.959350 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:47 crc kubenswrapper[4576]: I1203 08:40:47.959359 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:47Z","lastTransitionTime":"2025-12-03T08:40:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.062334 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.062404 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.062426 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.062455 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.062474 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:48Z","lastTransitionTime":"2025-12-03T08:40:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.164723 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.164777 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.164792 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.164810 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.164822 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:48Z","lastTransitionTime":"2025-12-03T08:40:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.267119 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.267179 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.267191 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.267206 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.267215 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:48Z","lastTransitionTime":"2025-12-03T08:40:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.370987 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.371044 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.371055 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.371073 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.371085 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:48Z","lastTransitionTime":"2025-12-03T08:40:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.473180 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.473247 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.473257 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.473272 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.473281 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:48Z","lastTransitionTime":"2025-12-03T08:40:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.577804 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.577874 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.577897 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.577928 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.577952 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:48Z","lastTransitionTime":"2025-12-03T08:40:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.677257 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.677275 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:40:48 crc kubenswrapper[4576]: E1203 08:40:48.677509 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:40:48 crc kubenswrapper[4576]: E1203 08:40:48.677726 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.680817 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.680880 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.680905 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.680937 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.680961 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:48Z","lastTransitionTime":"2025-12-03T08:40:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.783796 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.783875 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.783901 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.783939 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.783964 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:48Z","lastTransitionTime":"2025-12-03T08:40:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.887385 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.887448 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.887471 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.887490 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.887503 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:48Z","lastTransitionTime":"2025-12-03T08:40:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.989504 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.989572 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.989585 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.989605 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:48 crc kubenswrapper[4576]: I1203 08:40:48.989617 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:48Z","lastTransitionTime":"2025-12-03T08:40:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.092123 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.092162 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.092172 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.092188 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.092201 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:49Z","lastTransitionTime":"2025-12-03T08:40:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.193842 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.193910 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.193921 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.193937 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.193948 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:49Z","lastTransitionTime":"2025-12-03T08:40:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.299122 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.299191 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.299202 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.299216 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.299251 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:49Z","lastTransitionTime":"2025-12-03T08:40:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.401517 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.401560 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.401569 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.401582 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.401591 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:49Z","lastTransitionTime":"2025-12-03T08:40:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.529719 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.529778 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.529791 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.529811 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.529823 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:49Z","lastTransitionTime":"2025-12-03T08:40:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.632256 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.632288 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.632298 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.632315 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.632328 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:49Z","lastTransitionTime":"2025-12-03T08:40:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.679855 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:40:49 crc kubenswrapper[4576]: E1203 08:40:49.680067 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.680316 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:40:49 crc kubenswrapper[4576]: E1203 08:40:49.680455 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.734898 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.734946 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.734958 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.734976 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.734988 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:49Z","lastTransitionTime":"2025-12-03T08:40:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.838432 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.838520 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.838592 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.838620 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.838640 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:49Z","lastTransitionTime":"2025-12-03T08:40:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.942224 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.942320 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.942348 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.942388 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:49 crc kubenswrapper[4576]: I1203 08:40:49.942413 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:49Z","lastTransitionTime":"2025-12-03T08:40:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.045574 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.045659 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.045706 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.045731 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.045756 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:50Z","lastTransitionTime":"2025-12-03T08:40:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.148665 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.148705 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.148717 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.148733 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.148749 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:50Z","lastTransitionTime":"2025-12-03T08:40:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.252321 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.252388 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.252397 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.252418 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.252431 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:50Z","lastTransitionTime":"2025-12-03T08:40:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.355115 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.355152 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.355161 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.355173 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.355182 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:50Z","lastTransitionTime":"2025-12-03T08:40:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.458186 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.458265 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.458301 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.458324 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.458338 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:50Z","lastTransitionTime":"2025-12-03T08:40:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.562139 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.562230 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.562248 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.562300 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.562320 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:50Z","lastTransitionTime":"2025-12-03T08:40:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.665143 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.665220 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.665251 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.665271 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.665286 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:50Z","lastTransitionTime":"2025-12-03T08:40:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.676375 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.676379 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:40:50 crc kubenswrapper[4576]: E1203 08:40:50.676516 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:40:50 crc kubenswrapper[4576]: E1203 08:40:50.676701 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.773905 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.773946 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.773957 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.773975 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.773987 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:50Z","lastTransitionTime":"2025-12-03T08:40:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.877478 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.877611 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.877638 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.877668 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.877694 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:50Z","lastTransitionTime":"2025-12-03T08:40:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.980403 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.980478 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.980497 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.980520 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:50 crc kubenswrapper[4576]: I1203 08:40:50.980571 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:50Z","lastTransitionTime":"2025-12-03T08:40:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.087349 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.087406 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.087424 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.087449 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.087467 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:51Z","lastTransitionTime":"2025-12-03T08:40:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.190481 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.190585 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.190602 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.190623 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.190640 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:51Z","lastTransitionTime":"2025-12-03T08:40:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.245231 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.245288 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.245304 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.245328 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.245346 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:51Z","lastTransitionTime":"2025-12-03T08:40:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:51 crc kubenswrapper[4576]: E1203 08:40:51.267682 4576 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148060Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608860Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"86dcb870-937a-4231-a14c-053b8b425329\\\",\\\"systemUUID\\\":\\\"da5fb8e3-5a19-4f2d-831e-00b6f563dbea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:51Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.272368 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.272435 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.272446 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.272463 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.272474 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:51Z","lastTransitionTime":"2025-12-03T08:40:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:51 crc kubenswrapper[4576]: E1203 08:40:51.287194 4576 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148060Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608860Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"86dcb870-937a-4231-a14c-053b8b425329\\\",\\\"systemUUID\\\":\\\"da5fb8e3-5a19-4f2d-831e-00b6f563dbea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:51Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.292934 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.293010 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.293036 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.293068 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.293093 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:51Z","lastTransitionTime":"2025-12-03T08:40:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:51 crc kubenswrapper[4576]: E1203 08:40:51.307807 4576 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148060Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608860Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"86dcb870-937a-4231-a14c-053b8b425329\\\",\\\"systemUUID\\\":\\\"da5fb8e3-5a19-4f2d-831e-00b6f563dbea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:51Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.317097 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.317164 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.317181 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.317205 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.317223 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:51Z","lastTransitionTime":"2025-12-03T08:40:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:51 crc kubenswrapper[4576]: E1203 08:40:51.333627 4576 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148060Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608860Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"86dcb870-937a-4231-a14c-053b8b425329\\\",\\\"systemUUID\\\":\\\"da5fb8e3-5a19-4f2d-831e-00b6f563dbea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:51Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.339179 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.339515 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.339578 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.339604 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.339621 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:51Z","lastTransitionTime":"2025-12-03T08:40:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:51 crc kubenswrapper[4576]: E1203 08:40:51.354619 4576 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148060Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608860Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:40:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"86dcb870-937a-4231-a14c-053b8b425329\\\",\\\"systemUUID\\\":\\\"da5fb8e3-5a19-4f2d-831e-00b6f563dbea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:51Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:51 crc kubenswrapper[4576]: E1203 08:40:51.354778 4576 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.356715 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.356763 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.356779 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.356799 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.356815 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:51Z","lastTransitionTime":"2025-12-03T08:40:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.459660 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.459685 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.459692 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.459706 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.459716 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:51Z","lastTransitionTime":"2025-12-03T08:40:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.562728 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.562763 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.562776 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.562792 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.562803 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:51Z","lastTransitionTime":"2025-12-03T08:40:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.665874 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.665913 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.665924 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.665947 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.665964 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:51Z","lastTransitionTime":"2025-12-03T08:40:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.677264 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:40:51 crc kubenswrapper[4576]: E1203 08:40:51.677441 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.677273 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:40:51 crc kubenswrapper[4576]: E1203 08:40:51.677597 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.768728 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.768770 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.768779 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.768794 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.768806 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:51Z","lastTransitionTime":"2025-12-03T08:40:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.871164 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.871210 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.871225 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.871244 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.871257 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:51Z","lastTransitionTime":"2025-12-03T08:40:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.973492 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.973570 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.973582 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.973600 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:51 crc kubenswrapper[4576]: I1203 08:40:51.973615 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:51Z","lastTransitionTime":"2025-12-03T08:40:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.078238 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.078321 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.078345 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.078383 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.078409 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:52Z","lastTransitionTime":"2025-12-03T08:40:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.182451 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.182523 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.182574 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.182604 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.182622 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:52Z","lastTransitionTime":"2025-12-03T08:40:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.286337 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.286420 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.286449 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.286485 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.286504 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:52Z","lastTransitionTime":"2025-12-03T08:40:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.389360 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.389407 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.389421 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.389439 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.389450 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:52Z","lastTransitionTime":"2025-12-03T08:40:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.493627 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.493699 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.493722 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.493756 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.493775 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:52Z","lastTransitionTime":"2025-12-03T08:40:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.597209 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.597274 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.597294 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.597320 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.597337 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:52Z","lastTransitionTime":"2025-12-03T08:40:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.676981 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.676985 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:40:52 crc kubenswrapper[4576]: E1203 08:40:52.677600 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:40:52 crc kubenswrapper[4576]: E1203 08:40:52.677701 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.705147 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.705205 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.705217 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.705240 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.705255 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:52Z","lastTransitionTime":"2025-12-03T08:40:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.809606 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.809652 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.809664 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.809681 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.809695 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:52Z","lastTransitionTime":"2025-12-03T08:40:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.913676 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.913752 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.913770 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.913791 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:52 crc kubenswrapper[4576]: I1203 08:40:52.913803 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:52Z","lastTransitionTime":"2025-12-03T08:40:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.017216 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.017271 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.017290 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.017308 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.017320 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:53Z","lastTransitionTime":"2025-12-03T08:40:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.122559 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.122642 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.122654 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.122679 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.122700 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:53Z","lastTransitionTime":"2025-12-03T08:40:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.226433 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.226497 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.226520 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.226592 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.226610 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:53Z","lastTransitionTime":"2025-12-03T08:40:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.330483 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.330594 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.330622 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.330645 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.330660 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:53Z","lastTransitionTime":"2025-12-03T08:40:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.433980 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.434024 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.434034 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.434050 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.434064 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:53Z","lastTransitionTime":"2025-12-03T08:40:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.538942 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.539229 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.539356 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.539483 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.539624 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:53Z","lastTransitionTime":"2025-12-03T08:40:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.643946 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.644024 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.644070 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.644106 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.644125 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:53Z","lastTransitionTime":"2025-12-03T08:40:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.677322 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.677361 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:40:53 crc kubenswrapper[4576]: E1203 08:40:53.678007 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:40:53 crc kubenswrapper[4576]: E1203 08:40:53.678088 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.702223 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9942901-4be1-41db-b32c-9b996b72901f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88d9b58084ebfe06075e53fe77aec63e6c4e338aec6e1f31998d4bfcaca7ba77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac6d69cbfb86f4b717a66b46125d5d530049473a1f8b21e3be0d85c37ba5b837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3dc0695bda2cc75c2ccf82050ca47b5424
4a19319c741a2368e448f635ee3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fdc3c90dc45378285ef9d57dceea95152497f0aefe12991a47354d11a0c838b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b49f6c1bad412759cfa18b56387ca168d26a69640f1c15c1b82040b73531bf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"image\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:53Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.717017 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90118b09-8d4e-4eb5-831f-56e13fa31009\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290805e286eda8aff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:53Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.735368 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3038dcea3c147a1d523c7cd5c6ec049a499b39dda4de06d42324e6e8998334f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52333a05f039ec84c87b0170524958d01fbb7688666f98dd54700a6398ce3683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:53Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.746571 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r47b6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba9d9c6-a657-4ac0-99e4-5ec5babb64ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d5b6a2b0818d8e906110415190bed49348e53d7d987987ce0aa699a7d46378b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bfkfz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://502bb671eef9e8e245ba86484a31e8009e90df1c55cc320e8b7bcf227cdd9ae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-de
v@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bfkfz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-r47b6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:53Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.746925 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.746950 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.746984 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.746998 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.747006 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:53Z","lastTransitionTime":"2025-12-03T08:40:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.761362 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:53Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.773852 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"265e9cfe-6976-462e-b87f-9699e3a4c902\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89498b02a40a022c6a40707b5c5e4fa9c94e57d4c7a78ab9687c3e55b3860c26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf315b6a16cc730b3f5b6c98397a15b8542f4ea7812285c4f9aee06bab125942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f74d1ddba25f70ad4e5966802bf04babfc9035636f70bae24c2b97ebacb4bda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4992dfdb73bd089a847a14022b609cad34fef18469532862b7b943400c22794d\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4992dfdb73bd089a847a14022b609cad34fef18469532862b7b943400c22794d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:53Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.786684 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:53Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.803610 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7487354b977692f3cf1a8c2c9ede2bafac976404219c033a5c58f7695f039aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:53Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.816615 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:53Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.830432 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"72bb738c-5ba1-4104-8729-1a929fa6d2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06ad4110fcb33e7b3cd2221f15d0b056660f9111f5ac3fd5f2633d4269ee553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q8kww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:53Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.840242 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6qf5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a93555e6-d807-4db3-85c0-843f309e6efa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69aea80b1478f26af6f4835e1599afdb41d2a619ca44bfbdd35b1cdf664a7a2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mplbz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6qf5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:53Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.851917 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.851946 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.851954 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.851967 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.851977 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:53Z","lastTransitionTime":"2025-12-03T08:40:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.852500 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xrjlb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc47ce4e85ba7755ceabf5b864579aee5ea67ffcc13719ac1068f6c46a5a7d3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlwv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xrjlb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:53Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.863219 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pw7pk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c87be72e-a53b-42c9-bb32-f56cd0febe24\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvsfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvsfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:31Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pw7pk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:53Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.875093 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:53Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.887236 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff480bdd6b31fa53fe906aaa2459ccce17b507c4789f4ab6b64a8d2f286c9b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:53Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.900251 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jbxx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87ab08c02e31fb934248fab8c55b94006e54110d6176cd571dbefd774980d9aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9gm6g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jbxx2\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:53Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.911583 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60b1bede-26e9-4b5d-b450-9866da685693\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c6e7072fbdbb1574e02549ccd40206d728b36405b4c49b87e7f9fe11a8e0a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77fcbab4f8441fdeef4ff6f84326c1c5ab799faac0698120b8af9da36a524290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-pjb2d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:53Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.933368 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe34f07-1425-4b62-9eb0-70d1b197611c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\
\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://106766323b05884834bffc21cd45b77320e1bf62862139be3e9cfed8b004275e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:18Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ 
nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ada85ef4466c7ef967979027602ff555ef3717bd290edc5341cd81fd11f65bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ada85ef4466c7ef967979027602ff555ef3717bd290edc5341cd81fd11f65bf2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:29Z\\\",\\\"message\\\":\\\"mns:[] Mutations:[{Column:policies Mutator:insert Value:{GoSet:[{GoUUID:a5a72d02-1a0f-4f7f-a8c5-6923a1c4274a}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == 
{f6d604c1-9711-4e25-be6c-79ec28bbad1b}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 08:40:29.877056 5919 address_set.go:302] New(0d39bc5c-d5b9-432c-81be-2275bce5d7aa/default-network-controller:EgressIP:node-ips:v4:default/a712973235162149816) with []\\\\nI1203 08:40:29.877090 5919 address_set.go:302] New(aa6fc2dc-fab0-4812-b9da-809058e4dcf7/default-network-controller:EgressIP:egressip-served-pods:v4:default/a8519615025667110816) with []\\\\nI1203 08:40:29.877104 5919 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI1203 08:40:29.877141 5919 factory.go:1336] Added *v1.Node event handler 7\\\\nI1203 08:40:29.877160 5919 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1203 08:40:29.877419 5919 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1203 08:40:29.877514 5919 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1203 08:40:29.877552 5919 ovnkube.go:599] Stopped ovnkube\\\\nI1203 08:40:29.877569 5919 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1203 08:40:29.877621 5919 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:28Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-5f9zh_openshift-ovn-kubernetes(cfe34f07-1425-4b62-9eb0-70d1b197611c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":
\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5f9zh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:53Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.954710 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.954753 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.954765 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.954781 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:53 crc kubenswrapper[4576]: I1203 08:40:53.954791 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:53Z","lastTransitionTime":"2025-12-03T08:40:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.057604 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.058707 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.058927 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.059158 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.059360 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:54Z","lastTransitionTime":"2025-12-03T08:40:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.163060 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.163094 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.163104 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.163119 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.163128 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:54Z","lastTransitionTime":"2025-12-03T08:40:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.265989 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.266029 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.266041 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.266057 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.266070 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:54Z","lastTransitionTime":"2025-12-03T08:40:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.369616 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.369671 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.369681 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.369701 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.369718 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:54Z","lastTransitionTime":"2025-12-03T08:40:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.472751 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.472804 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.472817 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.472830 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.472839 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:54Z","lastTransitionTime":"2025-12-03T08:40:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.574970 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.575017 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.575033 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.575058 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.575094 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:54Z","lastTransitionTime":"2025-12-03T08:40:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.676347 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.676384 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:40:54 crc kubenswrapper[4576]: E1203 08:40:54.676520 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:40:54 crc kubenswrapper[4576]: E1203 08:40:54.676896 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.677289 4576 scope.go:117] "RemoveContainer" containerID="ada85ef4466c7ef967979027602ff555ef3717bd290edc5341cd81fd11f65bf2" Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.679272 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.679302 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.679311 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.679323 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.679333 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:54Z","lastTransitionTime":"2025-12-03T08:40:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.782635 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.782676 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.782685 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.782703 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.782713 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:54Z","lastTransitionTime":"2025-12-03T08:40:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.922732 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.922767 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.922778 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.922795 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:54 crc kubenswrapper[4576]: I1203 08:40:54.922807 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:54Z","lastTransitionTime":"2025-12-03T08:40:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.024826 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.024848 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.024857 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.024870 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.024878 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:55Z","lastTransitionTime":"2025-12-03T08:40:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.171985 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.172018 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.172026 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.172039 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.172049 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:55Z","lastTransitionTime":"2025-12-03T08:40:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.306793 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.306857 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.306877 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.306903 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.306921 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:55Z","lastTransitionTime":"2025-12-03T08:40:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.414710 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.414757 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.414766 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.414793 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.414804 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:55Z","lastTransitionTime":"2025-12-03T08:40:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.518006 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.518063 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.518075 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.518090 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.518100 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:55Z","lastTransitionTime":"2025-12-03T08:40:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.529150 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5f9zh_cfe34f07-1425-4b62-9eb0-70d1b197611c/ovnkube-controller/1.log" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.532357 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5f9zh_cfe34f07-1425-4b62-9eb0-70d1b197611c/ovn-acl-logging/0.log" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.533085 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" event={"ID":"cfe34f07-1425-4b62-9eb0-70d1b197611c","Type":"ContainerStarted","Data":"4c62c5d08f556f98c9679a8d29c4b82580c823b78e36384aa4d66d39035b2fd0"} Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.533560 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.545430 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60b1bede-26e9-4b5d-b450-9866da685693\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c6e7072fbdbb1574e02549ccd40206d728b36405b4c49b87e7f9fe11a8e0a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77fcbab4f8441fdeef4ff6f84326c1c5ab799faac0698120b8af9da36a524290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\"
:{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pjb2d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:55Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.564431 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe34f07-1425-4b62-9eb0-70d1b197611c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://106766323b05884834bffc21cd45b77320e1bf62862139be3e9cfed8b004275e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:18Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c62c5d08f556f98c9679a8d29c4b82580c823b78e36384aa4d66d39035b2fd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ada85ef4466c7ef967979027602ff555ef3717bd290edc5341cd81fd11f65bf2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:29Z\\\",\\\"message\\\":\\\"mns:[] Mutations:[{Column:policies Mutator:insert Value:{GoSet:[{GoUUID:a5a72d02-1a0f-4f7f-a8c5-6923a1c4274a}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f6d604c1-9711-4e25-be6c-79ec28bbad1b}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 08:40:29.877056 5919 address_set.go:302] New(0d39bc5c-d5b9-432c-81be-2275bce5d7aa/default-network-controller:EgressIP:node-ips:v4:default/a712973235162149816) with []\\\\nI1203 08:40:29.877090 5919 address_set.go:302] New(aa6fc2dc-fab0-4812-b9da-809058e4dcf7/default-network-controller:EgressIP:egressip-served-pods:v4:default/a8519615025667110816) with []\\\\nI1203 08:40:29.877104 5919 address_set.go:302] 
New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI1203 08:40:29.877141 5919 factory.go:1336] Added *v1.Node event handler 7\\\\nI1203 08:40:29.877160 5919 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1203 08:40:29.877419 5919 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1203 08:40:29.877514 5919 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1203 08:40:29.877552 5919 ovnkube.go:599] Stopped ovnkube\\\\nI1203 08:40:29.877569 5919 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1203 08:40:29.877621 5919 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:28Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:21
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5f9zh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:55Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.575427 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xrjlb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc47ce4e85ba7755ceabf5b864579aee5ea67ffcc13719ac1068f6c46a5a7d3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlwv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xrjlb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:55Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.588863 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pw7pk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c87be72e-a53b-42c9-bb32-f56cd0febe24\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvsfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvsfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:31Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pw7pk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:55Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.600360 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:55Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.619467 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff480bdd6b31fa53fe906aaa2459ccce17b507c4789f4ab6b64a8d2f286c9b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:55Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.620605 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.620644 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.620654 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.620671 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.620683 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:55Z","lastTransitionTime":"2025-12-03T08:40:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.638900 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jbxx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87ab08c02e31fb934248fab8c55b94006e54110d6176cd571dbefd774980d9aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\"
,\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9gm6g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jbxx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:55Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.653728 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3038dcea3c147a1d523c7cd5c6ec049a499b39dda4de06d42324e6e8998334f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52333a05f039ec84c87b0170524958d01fbb7688666f98dd54700a6398ce3683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:55Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.670107 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r47b6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba9d9c6-a657-4ac0-99e4-5ec5babb64ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d5b6a2b0818d8e906110415190bed49348e53d7d987987ce0aa699a7d46378b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bfkfz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://502bb671eef9e8e245ba86484a31e8009e90df1c55cc320e8b7bcf227cdd9ae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bfkfz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-r47b6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:55Z is after 2025-08-24T17:21:41Z" Dec 03 
08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.676876 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:40:55 crc kubenswrapper[4576]: E1203 08:40:55.676984 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.677175 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:40:55 crc kubenswrapper[4576]: E1203 08:40:55.677238 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.693822 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9942901-4be1-41db-b32c-9b996b72901f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88d9b58084ebfe06075e53fe77aec63e6c4e338aec6e1f31998d4bfcaca7ba77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac6d69cbfb86f4b717a66b46125d5d530049473a1f8b21e3be0d85c37ba5b837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3dc0695bda2cc75c2ccf82050ca47b54244a19319c741a2368e448f635ee3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fdc3c90dc45378285ef9d57dceea95152497f0aefe12991a47354d11a0c838b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b49f6c1bad412759cfa18b56387ca168d26a69640f1c15c1b82040b73531bf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a673147
31ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:55Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.708965 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90118b09-8d4e-4eb5-831f-56e13fa31009\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290805e286eda8aff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:55Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.726759 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.726806 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.726815 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.726828 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.726837 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:55Z","lastTransitionTime":"2025-12-03T08:40:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.734351 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:55Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.749771 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"265e9cfe-6976-462e-b87f-9699e3a4c902\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89498b02a40a022c6a40707b5c5e4fa9c94e57d4c7a78ab9687c3e55b3860c26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf315b6a16cc730b3f5b6c98397a15b8542f4ea7812285c4f9aee06bab125942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f74d1ddba25f70ad4e5966802bf04babfc9035636f70bae24c2b97ebacb4bda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4992dfdb73bd089a847a14022b609cad34fef18469532862b7b943400c22794d\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4992dfdb73bd089a847a14022b609cad34fef18469532862b7b943400c22794d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:55Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.774819 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:55Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.784768 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6qf5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a93555e6-d807-4db3-85c0-843f309e6efa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69aea80b1478f26af6f4835e1599afdb41d2a619ca44bfbdd35b1cdf664a7a2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mplbz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6qf5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-03T08:40:55Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.796964 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7487354b977692f3cf1a8c2c9ede2bafac976404219c033a5c58f7695f039aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:55Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.811976 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:55Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.828340 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72bb738c-5ba1-4104-8729-1a929fa6d2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06ad4110fcb33e7b3cd2221f15d0b056660f9111f5ac3fd5f2633d4269ee553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"container
ID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true
,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q8kww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:55Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.829603 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.829637 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.829650 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.829676 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.829688 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:55Z","lastTransitionTime":"2025-12-03T08:40:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.931714 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.931751 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.931762 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.931779 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:55 crc kubenswrapper[4576]: I1203 08:40:55.931791 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:55Z","lastTransitionTime":"2025-12-03T08:40:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.033565 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.033607 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.033617 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.033633 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.033643 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:56Z","lastTransitionTime":"2025-12-03T08:40:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.135693 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.135729 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.135741 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.135757 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.135770 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:56Z","lastTransitionTime":"2025-12-03T08:40:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.241616 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.241643 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.241651 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.241664 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.241672 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:56Z","lastTransitionTime":"2025-12-03T08:40:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.343924 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.343969 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.343979 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.343993 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.344003 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:56Z","lastTransitionTime":"2025-12-03T08:40:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.447808 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.447854 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.447871 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.447895 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.447912 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:56Z","lastTransitionTime":"2025-12-03T08:40:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.540288 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5f9zh_cfe34f07-1425-4b62-9eb0-70d1b197611c/ovnkube-controller/2.log" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.541350 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5f9zh_cfe34f07-1425-4b62-9eb0-70d1b197611c/ovnkube-controller/1.log" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.545948 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5f9zh_cfe34f07-1425-4b62-9eb0-70d1b197611c/ovn-acl-logging/0.log" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.546900 4576 generic.go:334] "Generic (PLEG): container finished" podID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerID="4c62c5d08f556f98c9679a8d29c4b82580c823b78e36384aa4d66d39035b2fd0" exitCode=1 Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.546949 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" event={"ID":"cfe34f07-1425-4b62-9eb0-70d1b197611c","Type":"ContainerDied","Data":"4c62c5d08f556f98c9679a8d29c4b82580c823b78e36384aa4d66d39035b2fd0"} Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.546993 4576 scope.go:117] "RemoveContainer" containerID="ada85ef4466c7ef967979027602ff555ef3717bd290edc5341cd81fd11f65bf2" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.547612 4576 scope.go:117] "RemoveContainer" containerID="4c62c5d08f556f98c9679a8d29c4b82580c823b78e36384aa4d66d39035b2fd0" Dec 03 08:40:56 crc kubenswrapper[4576]: E1203 08:40:56.547802 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-5f9zh_openshift-ovn-kubernetes(cfe34f07-1425-4b62-9eb0-70d1b197611c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.552861 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.552912 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.553232 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.553285 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.553303 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:56Z","lastTransitionTime":"2025-12-03T08:40:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.565767 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7487354b977692f3cf1a8c2c9ede2bafac976404219c033a5c58f7695f039aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:56Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.579484 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:56Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.595651 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72bb738c-5ba1-4104-8729-1a929fa6d2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06ad4110fcb33e7b3cd2221f15d0b056660f9111f5ac3fd5f2633d4269ee553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"container
ID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true
,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q8kww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:56Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.609607 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6qf5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a93555e6-d807-4db3-85c0-843f309e6efa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69aea80b1478f26af6f4835e1599afdb41d2a619ca44bfbdd35b1cdf664a7a2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mplbz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-6qf5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:56Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.624801 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:56Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.655860 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.655904 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.655920 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.655942 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.655958 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:56Z","lastTransitionTime":"2025-12-03T08:40:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.676988 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:40:56 crc kubenswrapper[4576]: E1203 08:40:56.677125 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.677312 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:40:56 crc kubenswrapper[4576]: E1203 08:40:56.677402 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.678343 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff480bdd6b31fa53fe906aaa2459ccce17b507c4789f4ab6b64a8d2f286c9b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:56Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.700348 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jbxx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87ab08c02e31fb934248fab8c55b94006e54110d6176cd571dbefd774980d9aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9gm6g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jbxx2\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:56Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.734457 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60b1bede-26e9-4b5d-b450-9866da685693\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c6e7072fbdbb1574e02549ccd40206d728b36405b4c49b87e7f9fe11a8e0a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77fcbab4f8441fdeef4ff6f84326c1c5ab799faac0698120b8af9da36a524290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-pjb2d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:56Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.754877 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe34f07-1425-4b62-9eb0-70d1b197611c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\
\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://106766323b05884834bffc21cd45b77320e1bf62862139be3e9cfed8b004275e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:18Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ 
nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c62c5d08f556f98c9679a8d29c4b82580c823b78e36384aa4d66d39035b2fd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ada85ef4466c7ef967979027602ff555ef3717bd290edc5341cd81fd11f65bf2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:29Z\\\",\\\"message\\\":\\\"mns:[] Mutations:[{Column:policies Mutator:insert Value:{GoSet:[{GoUUID:a5a72d02-1a0f-4f7f-a8c5-6923a1c4274a}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == 
{f6d604c1-9711-4e25-be6c-79ec28bbad1b}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 08:40:29.877056 5919 address_set.go:302] New(0d39bc5c-d5b9-432c-81be-2275bce5d7aa/default-network-controller:EgressIP:node-ips:v4:default/a712973235162149816) with []\\\\nI1203 08:40:29.877090 5919 address_set.go:302] New(aa6fc2dc-fab0-4812-b9da-809058e4dcf7/default-network-controller:EgressIP:egressip-served-pods:v4:default/a8519615025667110816) with []\\\\nI1203 08:40:29.877104 5919 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI1203 08:40:29.877141 5919 factory.go:1336] Added *v1.Node event handler 7\\\\nI1203 08:40:29.877160 5919 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1203 08:40:29.877419 5919 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1203 08:40:29.877514 5919 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1203 08:40:29.877552 5919 ovnkube.go:599] Stopped ovnkube\\\\nI1203 08:40:29.877569 5919 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1203 08:40:29.877621 5919 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:28Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c62c5d08f556f98c9679a8d29c4b82580c823b78e36384aa4d66d39035b2fd0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:56Z\\\",\\\"message\\\":\\\"hift-etcd/etcd_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-etcd/etcd\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.253\\\\\\\", Port:2379, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.253\\\\\\\", Port:9979, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1203 08:40:56.253688 6179 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post 
\\\\\\\"https://127.0.0.1:9743/\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-d
ev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5f9zh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:56Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.758418 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.758440 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.758449 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.758462 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.758472 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:56Z","lastTransitionTime":"2025-12-03T08:40:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.765902 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xrjlb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc47ce4e85ba7755ceabf5b864579aee5ea67ffcc13719ac1068f6c46a5a7d3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlwv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xrjlb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:56Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.777338 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pw7pk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c87be72e-a53b-42c9-bb32-f56cd0febe24\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvsfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvsfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:31Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pw7pk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:56Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.803958 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9942901-4be1-41db-b32c-9b996b72901f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88d9b58084ebfe06075e53fe77aec63e6c4e338aec6e1f31998d4bfcaca7ba77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac6d69cbfb86f4b717a66b46125d5d530049473a1f8b21e3be0d85c37ba5b837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3dc0695bda2cc75c2ccf82050ca47b54244a19319c741a2368e448f635ee3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fdc3c90dc45378285ef9d57dceea95152497f0
aefe12991a47354d11a0c838b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b49f6c1bad412759cfa18b56387ca168d26a69640f1c15c1b82040b73531bf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:56Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.817967 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90118b09-8d4e-4eb5-831f-56e13fa31009\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290805e286eda8aff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:56Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.831985 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3038dcea3c147a1d523c7cd5c6ec049a499b39dda4de06d42324e6e8998334f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52333a05f039ec84c87b0170524958d01fbb7688666f98dd54700a6398ce3683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:56Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.845205 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r47b6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba9d9c6-a657-4ac0-99e4-5ec5babb64ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d5b6a2b0818d8e906110415190bed49348e53d7d987987ce0aa699a7d46378b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bfkfz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://502bb671eef9e8e245ba86484a31e8009e90df1c55cc320e8b7bcf227cdd9ae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bfkfz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-r47b6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:56Z is after 2025-08-24T17:21:41Z" Dec 03 
08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.860079 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:56Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.860963 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.860998 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.861008 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.861022 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.861031 4576 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:56Z","lastTransitionTime":"2025-12-03T08:40:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.873008 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"265e9cfe-6976-462e-b87f-9699e3a4c902\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89498b02a40a022c6a40707b5c5e4fa9c94e57d4c7a78ab9687c3e55b3860c26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf315b6a16cc730b3f5b6c98397a15b8542f4ea7812285c4f9aee06bab125942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f74d1ddba25f70ad4e5966802bf04babfc9035636f70bae24c2b97ebacb4bda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controlle
r\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4992dfdb73bd089a847a14022b609cad34fef18469532862b7b943400c22794d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4992dfdb73bd089a847a14022b609cad34fef18469532862b7b943400c22794d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:56Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.886093 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:56Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.963728 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.963764 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.963772 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.963790 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:56 crc kubenswrapper[4576]: I1203 08:40:56.963800 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:56Z","lastTransitionTime":"2025-12-03T08:40:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.066642 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.066690 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.066705 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.066726 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.066741 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:57Z","lastTransitionTime":"2025-12-03T08:40:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.169277 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.169321 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.169331 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.169345 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.169354 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:57Z","lastTransitionTime":"2025-12-03T08:40:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.271959 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.272014 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.272025 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.272040 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.272049 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:57Z","lastTransitionTime":"2025-12-03T08:40:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.374823 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.374878 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.374889 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.374912 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.374930 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:57Z","lastTransitionTime":"2025-12-03T08:40:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.477482 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.477516 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.477545 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.477560 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.477568 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:57Z","lastTransitionTime":"2025-12-03T08:40:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.552582 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5f9zh_cfe34f07-1425-4b62-9eb0-70d1b197611c/ovnkube-controller/2.log" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.555022 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5f9zh_cfe34f07-1425-4b62-9eb0-70d1b197611c/ovn-acl-logging/0.log" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.557595 4576 scope.go:117] "RemoveContainer" containerID="4c62c5d08f556f98c9679a8d29c4b82580c823b78e36384aa4d66d39035b2fd0" Dec 03 08:40:57 crc kubenswrapper[4576]: E1203 08:40:57.557778 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-5f9zh_openshift-ovn-kubernetes(cfe34f07-1425-4b62-9eb0-70d1b197611c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.574841 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:57Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.579620 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.579671 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.579685 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.579702 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.579744 4576 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:57Z","lastTransitionTime":"2025-12-03T08:40:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.591987 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"265e9cfe-6976-462e-b87f-9699e3a4c902\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89498b02a40a022c6a40707b5c5e4fa9c94e57d4c7a78ab9687c3e55b3860c26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf315b6a16cc730b3f5b6c98397a15b8542f4ea7812285c4f9aee06bab125942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f74d1ddba25f70ad4e5966802bf04babfc9035636f70bae24c2b97ebacb4bda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controlle
r\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4992dfdb73bd089a847a14022b609cad34fef18469532862b7b943400c22794d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4992dfdb73bd089a847a14022b609cad34fef18469532862b7b943400c22794d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:57Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.606112 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:57Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.621214 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7487354b977692f3cf1a8c2c9ede2bafac976404219c033a5c58f7695f039aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:57Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.636278 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:57Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.650834 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"72bb738c-5ba1-4104-8729-1a929fa6d2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06ad4110fcb33e7b3cd2221f15d0b056660f9111f5ac3fd5f2633d4269ee553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q8kww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:57Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.671356 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6qf5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a93555e6-d807-4db3-85c0-843f309e6efa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69aea80b1478f26af6f4835e1599afdb41d2a619ca44bfbdd35b1cdf664a7a2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mplbz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6qf5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:57Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.677730 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.677822 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:40:57 crc kubenswrapper[4576]: E1203 08:40:57.677866 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:40:57 crc kubenswrapper[4576]: E1203 08:40:57.677964 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.681860 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.681928 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.681941 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.681955 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.681966 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:57Z","lastTransitionTime":"2025-12-03T08:40:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.684944 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pw7pk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c87be72e-a53b-42c9-bb32-f56cd0febe24\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvsfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvsfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:31Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pw7pk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:57Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.695624 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:57Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.705829 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff480bdd6b31fa53fe906aaa2459ccce17b507c4789f4ab6b64a8d2f286c9b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:57Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.719269 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jbxx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87ab08c02e31fb934248fab8c55b94006e54110d6176cd571dbefd774980d9aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9gm6g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jbxx2\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:57Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.729826 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60b1bede-26e9-4b5d-b450-9866da685693\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c6e7072fbdbb1574e02549ccd40206d728b36405b4c49b87e7f9fe11a8e0a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77fcbab4f8441fdeef4ff6f84326c1c5ab799faac0698120b8af9da36a524290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-pjb2d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:57Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.749407 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe34f07-1425-4b62-9eb0-70d1b197611c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\
\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://106766323b05884834bffc21cd45b77320e1bf62862139be3e9cfed8b004275e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:18Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ 
nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c62c5d08f556f98c9679a8d29c4b82580c823b78e36384aa4d66d39035b2fd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c62c5d08f556f98c9679a8d29c4b82580c823b78e36384aa4d66d39035b2fd0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:56Z\\\",\\\"message\\\":\\\"hift-etcd/etcd_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", 
\\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-etcd/etcd\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.253\\\\\\\", Port:2379, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.253\\\\\\\", Port:9979, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1203 08:40:56.253688 6179 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-5f9zh_openshift-ovn-kubernetes(cfe34f07-1425-4b62-9eb0-70d1b197611c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mount
Path\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5f9zh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:57Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.762086 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xrjlb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc47ce4e85ba7755ceabf5b864579aee5ea67ffcc13719ac1068f6c46a5a7d3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlwv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xrjlb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:57Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.782303 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9942901-4be1-41db-b32c-9b996b72901f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88d9b58084ebfe06075e53fe77aec63e6c4e338aec6e1f31998d4bfcaca7ba77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac6d69cbfb86f4b717a66b46125d5d530049473a1f8b21e3be0d85c37ba5b837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3dc0695bda2cc75c2ccf82050ca47b54244a19319c741a2368e448f635ee3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fdc3c90dc45378285ef9d57dceea95152497f0
aefe12991a47354d11a0c838b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b49f6c1bad412759cfa18b56387ca168d26a69640f1c15c1b82040b73531bf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:57Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.783773 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.783818 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.783830 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.783849 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.783866 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:57Z","lastTransitionTime":"2025-12-03T08:40:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.796304 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90118b09-8d4e-4eb5-831f-56e13fa31009\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290805e286eda8aff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:57Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.807859 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3038dcea3c147a1d523c7cd5c6ec049a499b39dda4de06d42324e6e8998334f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52333a05f039ec84c87b0170524958d01fbb7688666f98dd54700a6398ce3683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:57Z is after 2025-08-24T17:21:41Z" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.820799 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r47b6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba9d9c6-a657-4ac0-99e4-5ec5babb64ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d5b6a2b0818d8e906110415190bed49348e53d7d987987ce0aa699a7d46378b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bfkfz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://502bb671eef9e8e245ba86484a31e8009e90df1c55cc320e8b7bcf227cdd9ae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bfkfz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-r47b6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:40:57Z is after 2025-08-24T17:21:41Z" Dec 03 
08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.886600 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.886638 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.886649 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.886666 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:57 crc kubenswrapper[4576]: I1203 08:40:57.886675 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:57Z","lastTransitionTime":"2025-12-03T08:40:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.008025 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.008067 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.008076 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.008091 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.008100 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:58Z","lastTransitionTime":"2025-12-03T08:40:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.110719 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.110746 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.110755 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.110769 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.110777 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:58Z","lastTransitionTime":"2025-12-03T08:40:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.215843 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.215882 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.215892 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.215909 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.215921 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:58Z","lastTransitionTime":"2025-12-03T08:40:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.318581 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.318644 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.318660 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.318685 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.318700 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:58Z","lastTransitionTime":"2025-12-03T08:40:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.421120 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.421217 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.421232 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.421305 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.421322 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:58Z","lastTransitionTime":"2025-12-03T08:40:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.523561 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.523601 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.523609 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.523622 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.523630 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:58Z","lastTransitionTime":"2025-12-03T08:40:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.626254 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.626290 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.626302 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.626317 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.626328 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:58Z","lastTransitionTime":"2025-12-03T08:40:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.677079 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.677079 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:40:58 crc kubenswrapper[4576]: E1203 08:40:58.677200 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:40:58 crc kubenswrapper[4576]: E1203 08:40:58.677263 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.728506 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.728558 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.728572 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.728591 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.728605 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:58Z","lastTransitionTime":"2025-12-03T08:40:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.831106 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.831174 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.831192 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.831690 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.831716 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:58Z","lastTransitionTime":"2025-12-03T08:40:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.934646 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.934678 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.934689 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.934707 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:58 crc kubenswrapper[4576]: I1203 08:40:58.934718 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:58Z","lastTransitionTime":"2025-12-03T08:40:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.038124 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.038176 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.038191 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.038210 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.038221 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:59Z","lastTransitionTime":"2025-12-03T08:40:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.141011 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.141047 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.141059 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.141076 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.141089 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:59Z","lastTransitionTime":"2025-12-03T08:40:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.243201 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.243250 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.243260 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.243272 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.243281 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:59Z","lastTransitionTime":"2025-12-03T08:40:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.346852 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.346905 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.346924 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.346949 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.346970 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:59Z","lastTransitionTime":"2025-12-03T08:40:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.449558 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.449635 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.449646 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.449663 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.449676 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:59Z","lastTransitionTime":"2025-12-03T08:40:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.552444 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.552481 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.552490 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.552508 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.552518 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:59Z","lastTransitionTime":"2025-12-03T08:40:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.655598 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.655644 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.655656 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.655673 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.655684 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:59Z","lastTransitionTime":"2025-12-03T08:40:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.677120 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.677165 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:40:59 crc kubenswrapper[4576]: E1203 08:40:59.677258 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:40:59 crc kubenswrapper[4576]: E1203 08:40:59.677320 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.759119 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.759200 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.759224 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.759692 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.759969 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:59Z","lastTransitionTime":"2025-12-03T08:40:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.863229 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.863289 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.863308 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.863331 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.863348 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:59Z","lastTransitionTime":"2025-12-03T08:40:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.965380 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.965408 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.965416 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.965430 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:40:59 crc kubenswrapper[4576]: I1203 08:40:59.965439 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:40:59Z","lastTransitionTime":"2025-12-03T08:40:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.068052 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.068083 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.068092 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.068106 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.068114 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:00Z","lastTransitionTime":"2025-12-03T08:41:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.170678 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.170723 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.170733 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.170750 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.170760 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:00Z","lastTransitionTime":"2025-12-03T08:41:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.273436 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.273470 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.273479 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.273493 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.273502 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:00Z","lastTransitionTime":"2025-12-03T08:41:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.375745 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.375786 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.375795 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.375810 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.375821 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:00Z","lastTransitionTime":"2025-12-03T08:41:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.478664 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.478711 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.478725 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.478743 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.478755 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:00Z","lastTransitionTime":"2025-12-03T08:41:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.580604 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.580658 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.580673 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.580692 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.580708 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:00Z","lastTransitionTime":"2025-12-03T08:41:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.676451 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.676564 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:41:00 crc kubenswrapper[4576]: E1203 08:41:00.676608 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:41:00 crc kubenswrapper[4576]: E1203 08:41:00.676715 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.682754 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.682787 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.682800 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.682816 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.682827 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:00Z","lastTransitionTime":"2025-12-03T08:41:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.785759 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.785795 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.785820 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.785837 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.785855 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:00Z","lastTransitionTime":"2025-12-03T08:41:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.888392 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.888438 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.888449 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.888467 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.888477 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:00Z","lastTransitionTime":"2025-12-03T08:41:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.991116 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.991164 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.991176 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.991195 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:00 crc kubenswrapper[4576]: I1203 08:41:00.991207 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:00Z","lastTransitionTime":"2025-12-03T08:41:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.094347 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.094389 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.094401 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.094416 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.094427 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:01Z","lastTransitionTime":"2025-12-03T08:41:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.196522 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.196576 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.196589 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.196604 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.196616 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:01Z","lastTransitionTime":"2025-12-03T08:41:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.299120 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.299153 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.299165 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.299182 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.299194 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:01Z","lastTransitionTime":"2025-12-03T08:41:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.401170 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.401198 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.401206 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.401219 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.401228 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:01Z","lastTransitionTime":"2025-12-03T08:41:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.503946 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.503995 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.504009 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.504025 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.504037 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:01Z","lastTransitionTime":"2025-12-03T08:41:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.607440 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.607482 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.607491 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.607507 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.607539 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:01Z","lastTransitionTime":"2025-12-03T08:41:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.676197 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.676204 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:41:01 crc kubenswrapper[4576]: E1203 08:41:01.676376 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:41:01 crc kubenswrapper[4576]: E1203 08:41:01.676456 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.696091 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.696133 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.696144 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.696161 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.696173 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:01Z","lastTransitionTime":"2025-12-03T08:41:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:01 crc kubenswrapper[4576]: E1203 08:41:01.709449 4576 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148060Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608860Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeByt
es\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"86dcb870-937a-4231-a14c-053b8b425329\\\",\\\"systemUUID\\\":\\\"da5fb8e3-5a19-4f2d-831e-00b6f563dbea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:01Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.712764 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.712808 4576 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.712821 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.712839 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.712851 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:01Z","lastTransitionTime":"2025-12-03T08:41:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:01 crc kubenswrapper[4576]: E1203 08:41:01.725484 4576 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148060Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608860Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"86dcb870-937a-4231-a14c-053b8b425329\\\",\\\"systemUUID\\\":\\\"da5fb8e3-5a19-4f2d-831e-00b6f563dbea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:01Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.728984 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.729016 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.729025 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.729042 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.729052 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:01Z","lastTransitionTime":"2025-12-03T08:41:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:01 crc kubenswrapper[4576]: E1203 08:41:01.740923 4576 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148060Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608860Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"86dcb870-937a-4231-a14c-053b8b425329\\\",\\\"systemUUID\\\":\\\"da5fb8e3-5a19-4f2d-831e-00b6f563dbea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:01Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.745586 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.745616 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.745625 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.745641 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.745651 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:01Z","lastTransitionTime":"2025-12-03T08:41:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:01 crc kubenswrapper[4576]: E1203 08:41:01.758486 4576 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148060Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608860Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"86dcb870-937a-4231-a14c-053b8b425329\\\",\\\"systemUUID\\\":\\\"da5fb8e3-5a19-4f2d-831e-00b6f563dbea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:01Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.763294 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.763330 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.763339 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.763352 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.763361 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:01Z","lastTransitionTime":"2025-12-03T08:41:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:01 crc kubenswrapper[4576]: E1203 08:41:01.780781 4576 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148060Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608860Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"86dcb870-937a-4231-a14c-053b8b425329\\\",\\\"systemUUID\\\":\\\"da5fb8e3-5a19-4f2d-831e-00b6f563dbea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:01Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:01 crc kubenswrapper[4576]: E1203 08:41:01.780922 4576 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.782672 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.782821 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.782959 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.783106 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.783240 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:01Z","lastTransitionTime":"2025-12-03T08:41:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.887323 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.887383 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.887406 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.887430 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.887447 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:01Z","lastTransitionTime":"2025-12-03T08:41:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.989714 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.989755 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.989774 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.989790 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:01 crc kubenswrapper[4576]: I1203 08:41:01.989800 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:01Z","lastTransitionTime":"2025-12-03T08:41:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.093225 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.093288 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.093308 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.093344 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.093365 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:02Z","lastTransitionTime":"2025-12-03T08:41:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.196448 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.196890 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.196994 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.197084 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.197263 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:02Z","lastTransitionTime":"2025-12-03T08:41:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.300417 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.300458 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.300469 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.300485 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.300497 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:02Z","lastTransitionTime":"2025-12-03T08:41:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.403588 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.403636 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.403648 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.403668 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.403680 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:02Z","lastTransitionTime":"2025-12-03T08:41:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.506879 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.506933 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.506959 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.506990 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.507007 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:02Z","lastTransitionTime":"2025-12-03T08:41:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.609895 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.609959 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.609970 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.609987 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.609997 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:02Z","lastTransitionTime":"2025-12-03T08:41:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.677134 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.677141 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:41:02 crc kubenswrapper[4576]: E1203 08:41:02.677986 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:41:02 crc kubenswrapper[4576]: E1203 08:41:02.678288 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.713136 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.713217 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.713240 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.713270 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.713291 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:02Z","lastTransitionTime":"2025-12-03T08:41:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.819554 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.819618 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.819636 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.819672 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.819686 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:02Z","lastTransitionTime":"2025-12-03T08:41:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.922402 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.922447 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.922459 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.922476 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:02 crc kubenswrapper[4576]: I1203 08:41:02.922488 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:02Z","lastTransitionTime":"2025-12-03T08:41:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.025419 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.025475 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.025492 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.025515 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.025568 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:03Z","lastTransitionTime":"2025-12-03T08:41:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.130055 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.130133 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.130142 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.130166 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.130186 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:03Z","lastTransitionTime":"2025-12-03T08:41:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.233915 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.233993 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.234020 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.234357 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.234398 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:03Z","lastTransitionTime":"2025-12-03T08:41:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.337325 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.337389 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.337402 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.337426 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.337437 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:03Z","lastTransitionTime":"2025-12-03T08:41:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.439787 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.439821 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.439833 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.439848 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.439868 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:03Z","lastTransitionTime":"2025-12-03T08:41:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.543453 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.543486 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.543499 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.543517 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.543538 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:03Z","lastTransitionTime":"2025-12-03T08:41:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.612206 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c87be72e-a53b-42c9-bb32-f56cd0febe24-metrics-certs\") pod \"network-metrics-daemon-pw7pk\" (UID: \"c87be72e-a53b-42c9-bb32-f56cd0febe24\") " pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:41:03 crc kubenswrapper[4576]: E1203 08:41:03.612625 4576 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 08:41:03 crc kubenswrapper[4576]: E1203 08:41:03.612825 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c87be72e-a53b-42c9-bb32-f56cd0febe24-metrics-certs podName:c87be72e-a53b-42c9-bb32-f56cd0febe24 nodeName:}" failed. No retries permitted until 2025-12-03 08:41:35.612763911 +0000 UTC m=+102.998740905 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c87be72e-a53b-42c9-bb32-f56cd0febe24-metrics-certs") pod "network-metrics-daemon-pw7pk" (UID: "c87be72e-a53b-42c9-bb32-f56cd0febe24") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.665875 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.665926 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.665936 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.665950 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.665964 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:03Z","lastTransitionTime":"2025-12-03T08:41:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.680911 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.680959 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:41:03 crc kubenswrapper[4576]: E1203 08:41:03.681185 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:41:03 crc kubenswrapper[4576]: E1203 08:41:03.681390 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.696734 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resourc
es\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:03Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.712432 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"265e9cfe-6976-462e-b87f-9699e3a4c902\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89498b02a40a022c6a40707b5c5e4fa9c94e57d4c7a78ab9687c3e55b3860c26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf315b6a16cc730b3f5b6c98397a15b8542f4ea7812285c4f9aee06bab125942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f74d1ddba25f70ad4e5966802bf04babfc9035636f70bae24c2b97ebacb4bda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4992dfdb73bd089a847a14022b609cad34fef18469532862b7b943400c22794d\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4992dfdb73bd089a847a14022b609cad34fef18469532862b7b943400c22794d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:03Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.725065 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:03Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.743272 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7487354b977692f3cf1a8c2c9ede2bafac976404219c033a5c58f7695f039aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:03Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.754689 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:03Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.767956 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"72bb738c-5ba1-4104-8729-1a929fa6d2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06ad4110fcb33e7b3cd2221f15d0b056660f9111f5ac3fd5f2633d4269ee553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q8kww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:03Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.768607 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.768648 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:03 crc 
kubenswrapper[4576]: I1203 08:41:03.768661 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.768679 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.768691 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:03Z","lastTransitionTime":"2025-12-03T08:41:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.777497 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6qf5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a93555e6-d807-4db3-85c0-843f309e6efa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69aea80b1478f26af6f4835e1599afdb41d2a619ca44bfbdd35b1cdf664a7a2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mplbz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6qf5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:03Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.789575 4576 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:03Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.804102 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff480bdd6b31fa53fe906aaa2459ccce17b507c4789f4ab6b64a8d2f286c9b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:03Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.823062 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jbxx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87ab08c02e31fb934248fab8c55b94006e54110d6176cd571dbefd774980d9aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9gm6g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jbxx2\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:03Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.834124 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60b1bede-26e9-4b5d-b450-9866da685693\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c6e7072fbdbb1574e02549ccd40206d728b36405b4c49b87e7f9fe11a8e0a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77fcbab4f8441fdeef4ff6f84326c1c5ab799faac0698120b8af9da36a524290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-pjb2d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:03Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.852093 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe34f07-1425-4b62-9eb0-70d1b197611c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\
\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://106766323b05884834bffc21cd45b77320e1bf62862139be3e9cfed8b004275e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:18Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ 
nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c62c5d08f556f98c9679a8d29c4b82580c823b78e36384aa4d66d39035b2fd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c62c5d08f556f98c9679a8d29c4b82580c823b78e36384aa4d66d39035b2fd0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:56Z\\\",\\\"message\\\":\\\"hift-etcd/etcd_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", 
\\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-etcd/etcd\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.253\\\\\\\", Port:2379, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.253\\\\\\\", Port:9979, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1203 08:40:56.253688 6179 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-5f9zh_openshift-ovn-kubernetes(cfe34f07-1425-4b62-9eb0-70d1b197611c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mount
Path\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5f9zh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:03Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.863038 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xrjlb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc47ce4e85ba7755ceabf5b864579aee5ea67ffcc13719ac1068f6c46a5a7d3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlwv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xrjlb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:03Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.871376 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.871400 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.871410 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.871426 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.871437 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:03Z","lastTransitionTime":"2025-12-03T08:41:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.874998 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pw7pk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c87be72e-a53b-42c9-bb32-f56cd0febe24\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvsfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvsfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:31Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pw7pk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:03Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.896453 4576 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9942901-4be1-41db-b32c-9b996b72901f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88d9b58084ebfe06075e53fe77aec63e6c4e338aec6e1f31998d4bfcaca7ba77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac6d69cbfb86f4b717a66b46125d5d530049473a1f8b21e3be0d85c37ba5b837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3dc0695bda2cc75c2ccf82050ca47b54244a19319c741a2368e448f635ee3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]
},{\\\"containerID\\\":\\\"cri-o://2fdc3c90dc45378285ef9d57dceea95152497f0aefe12991a47354d11a0c838b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b49f6c1bad412759cfa18b56387ca168d26a69640f1c15c1b82040b73531bf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}}},{\\\"containerID\\\":\\\"cr
i-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:03Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.907567 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90118b09-8d4e-4eb5-831f-56e13fa31009\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290805e286eda8aff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:03Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.919661 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3038dcea3c147a1d523c7cd5c6ec049a499b39dda4de06d42324e6e8998334f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52333a05f039ec84c87b0170524958d01fbb7688666f98dd54700a6398ce3683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:03Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.929347 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r47b6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba9d9c6-a657-4ac0-99e4-5ec5babb64ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d5b6a2b0818d8e906110415190bed49348e53d7d987987ce0aa699a7d46378b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bfkfz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://502bb671eef9e8e245ba86484a31e8009e90df1c55cc320e8b7bcf227cdd9ae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-de
v@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bfkfz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-r47b6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:03Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.973966 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.973999 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.974010 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.974039 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:03 crc kubenswrapper[4576]: I1203 08:41:03.974050 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:03Z","lastTransitionTime":"2025-12-03T08:41:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.076351 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.076404 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.076418 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.076436 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.076449 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:04Z","lastTransitionTime":"2025-12-03T08:41:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.179371 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.179756 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.179884 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.180022 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.180190 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:04Z","lastTransitionTime":"2025-12-03T08:41:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.282627 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.282658 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.282666 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.282680 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.282691 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:04Z","lastTransitionTime":"2025-12-03T08:41:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.385599 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.386016 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.386159 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.386295 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.386427 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:04Z","lastTransitionTime":"2025-12-03T08:41:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.489630 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.489660 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.489669 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.489682 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.489690 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:04Z","lastTransitionTime":"2025-12-03T08:41:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.591567 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.591887 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.591986 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.592080 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.592168 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:04Z","lastTransitionTime":"2025-12-03T08:41:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.676796 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.676796 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:41:04 crc kubenswrapper[4576]: E1203 08:41:04.677596 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:41:04 crc kubenswrapper[4576]: E1203 08:41:04.677601 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.695635 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.695673 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.695686 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.695704 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.695715 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:04Z","lastTransitionTime":"2025-12-03T08:41:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.798684 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.798727 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.798740 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.798757 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.798767 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:04Z","lastTransitionTime":"2025-12-03T08:41:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.902826 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.902859 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.902868 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.902881 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:04 crc kubenswrapper[4576]: I1203 08:41:04.902890 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:04Z","lastTransitionTime":"2025-12-03T08:41:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.005137 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.005180 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.005190 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.005207 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.005219 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:05Z","lastTransitionTime":"2025-12-03T08:41:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.108625 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.108656 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.108665 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.108689 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.108699 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:05Z","lastTransitionTime":"2025-12-03T08:41:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.211604 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.211659 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.211673 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.211690 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.211702 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:05Z","lastTransitionTime":"2025-12-03T08:41:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.314295 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.314335 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.314346 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.314361 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.314371 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:05Z","lastTransitionTime":"2025-12-03T08:41:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.416556 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.416602 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.416618 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.416639 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.416655 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:05Z","lastTransitionTime":"2025-12-03T08:41:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.518710 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.518748 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.518760 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.518776 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.518788 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:05Z","lastTransitionTime":"2025-12-03T08:41:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.585773 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jbxx2_e2b7eac2-6611-49d0-9da5-f97a3ccc9529/kube-multus/0.log" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.585850 4576 generic.go:334] "Generic (PLEG): container finished" podID="e2b7eac2-6611-49d0-9da5-f97a3ccc9529" containerID="87ab08c02e31fb934248fab8c55b94006e54110d6176cd571dbefd774980d9aa" exitCode=1 Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.585918 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jbxx2" event={"ID":"e2b7eac2-6611-49d0-9da5-f97a3ccc9529","Type":"ContainerDied","Data":"87ab08c02e31fb934248fab8c55b94006e54110d6176cd571dbefd774980d9aa"} Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.586568 4576 scope.go:117] "RemoveContainer" containerID="87ab08c02e31fb934248fab8c55b94006e54110d6176cd571dbefd774980d9aa" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.607553 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7487354b977692f3cf1a8c2c9ede2bafac976404219c033a5c58f7695f039aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:05Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.623897 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 
08:41:05.625356 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.625460 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.625591 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.625689 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:05Z","lastTransitionTime":"2025-12-03T08:41:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.631282 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:05Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.650513 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72bb738c-5ba1-4104-8729-1a929fa6d2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06ad4110fcb33e7b3cd2221f15d0b056660f9111f5ac3fd5f2633d4269ee553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q8kww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:05Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.663818 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6qf5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a93555e6-d807-4db3-85c0-843f309e6efa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69aea80b1478f26af6f4835e1599afdb41d2a619ca44bfbdd35b1cdf664a7a2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mplbz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6qf5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:05Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.678579 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:41:05 crc kubenswrapper[4576]: E1203 08:41:05.678687 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.678812 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:41:05 crc kubenswrapper[4576]: E1203 08:41:05.679908 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.694908 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe34f07-1425-4b62-9eb0-70d1b197611c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://106766323b05884834bffc21cd45b77320e1bf62862139be3e9cfed8b004275e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:18Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c62c5d08f556f98c9679a8d29c4b82580c823b78e36384aa4d66d39035b2fd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c62c5d08f556f98c9679a8d29c4b82580c823b78e36384aa4d66d39035b2fd0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:56Z\\\",\\\"message\\\":\\\"hift-etcd/etcd_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-etcd/etcd\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.253\\\\\\\", Port:2379, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.253\\\\\\\", Port:9979, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), 
Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1203 08:40:56.253688 6179 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-5f9zh_openshift-ovn-kubernetes(cfe34f07-1425-4b62-9eb0-70d1b197611c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5f9zh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:05Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.699094 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.711416 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xrjlb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc47ce4e85ba7755ceabf5b864579aee5ea67ffcc13719ac1068f6c46a5a7d3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlwv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xrjlb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:05Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.724857 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pw7pk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c87be72e-a53b-42c9-bb32-f56cd0febe24\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvsfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvsfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:31Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pw7pk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:05Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.729254 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.729402 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.729497 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.729613 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.729714 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:05Z","lastTransitionTime":"2025-12-03T08:41:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.744920 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:05Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.758149 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff480bdd6b31fa53fe906aaa2459ccce17b507c4789f4ab6b64a8d2f286c9b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:05Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.771341 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jbxx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87ab08c02e31fb934248fab8c55b94006e54110d6176cd571dbefd774980d9aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87ab08c02e31fb934248fab8c55b94006e54110d6176cd571dbefd774980d9aa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:41:05Z\\\",\\\"message\\\":\\\"2025-12-03T08:40:20+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c4b65084-905c-448c-8be2-61fd5cb5a2c0\\\\n2025-12-03T08:40:20+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c4b65084-905c-448c-8be2-61fd5cb5a2c0 to /host/opt/cni/bin/\\\\n2025-12-03T08:40:20Z [verbose] multus-daemon started\\\\n2025-12-03T08:40:20Z [verbose] Readiness Indicator file check\\\\n2025-12-03T08:41:05Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9gm6g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-jbxx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:05Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.783767 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60b1bede-26e9-4b5d-b450-9866da685693\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c6e7072fbdbb1574e02549ccd40206d728b36405b4c49b87e7f9fe11a8e0a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77fcbab4f8441fdeef4ff6f84326c1c5ab799faac0698120b8af9da36a524290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pjb2d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:05Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.795315 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r47b6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba9d9c6-a657-4ac0-99e4-5ec5babb64ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d5b6a2b0818d8e906110415190bed49348e53d7d987987ce0aa699a7d46378b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bfkfz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://502bb671eef9e8e245ba86484a31e8009e90df1c55cc320e8b7bcf227cdd9ae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bfkfz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-r47b6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:05Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.816421 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9942901-4be1-41db-b32c-9b996b72901f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88d9b58084ebfe06075e53fe77aec63e6c4e338aec6e1f31998d4bfcaca7ba77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac6d69cbfb86f4b717a66b46125d5d530049473a1f8b21e3be0d85c37ba5b837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name
\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3dc0695bda2cc75c2ccf82050ca47b54244a19319c741a2368e448f635ee3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fdc3c90dc45378285ef9d57dceea95152497f0aefe12991a47354d11a0c838b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b49f6c1bad412759cfa18b56387ca168d26a69640f1c15c1b82040b73531bf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o:/
/deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:05Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.831967 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90118b09-8d4e-4eb5-831f-56e13fa31009\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290805e286eda8aff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:05Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.833119 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.833262 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.833380 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.834205 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.834409 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:05Z","lastTransitionTime":"2025-12-03T08:41:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.854185 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3038dcea3c147a1d523c7cd5c6ec049a499b39dda4de06d42324e6e8998334f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52333a05f039ec84c87b0170524958d01fbb7688666f98dd54700a6398ce3683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:05Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.873978 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:05Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.892600 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"265e9cfe-6976-462e-b87f-9699e3a4c902\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89498b02a40a022c6a40707b5c5e4fa9c94e57d4c7a78ab9687c3e55b3860c26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf315b6a16cc730b3f5b6c98397a15b8542f4ea7812285c4f9aee06bab125942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f74d1ddba25f70ad4e5966802bf04babfc9035636f70bae24c2b97ebacb4bda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4992dfdb73bd089a847a14022b609cad34fef18469532862b7b943400c22794d\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4992dfdb73bd089a847a14022b609cad34fef18469532862b7b943400c22794d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:05Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.905739 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:05Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.937559 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.937595 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.937607 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.937627 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:05 crc kubenswrapper[4576]: I1203 08:41:05.937668 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:05Z","lastTransitionTime":"2025-12-03T08:41:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.040476 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.040623 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.040696 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.040729 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.040752 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:06Z","lastTransitionTime":"2025-12-03T08:41:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.143683 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.143746 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.143769 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.143797 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.143818 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:06Z","lastTransitionTime":"2025-12-03T08:41:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.246349 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.246707 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.246850 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.247052 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.247193 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:06Z","lastTransitionTime":"2025-12-03T08:41:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.350734 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.350771 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.350780 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.350796 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.350806 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:06Z","lastTransitionTime":"2025-12-03T08:41:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.453730 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.453802 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.453827 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.453858 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.453919 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:06Z","lastTransitionTime":"2025-12-03T08:41:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.556940 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.556994 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.557003 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.557017 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.557025 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:06Z","lastTransitionTime":"2025-12-03T08:41:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.591175 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jbxx2_e2b7eac2-6611-49d0-9da5-f97a3ccc9529/kube-multus/0.log" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.591331 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jbxx2" event={"ID":"e2b7eac2-6611-49d0-9da5-f97a3ccc9529","Type":"ContainerStarted","Data":"43112fdd73d3944e79a634803caefc0a68a10da3cdf3713c117ade99e3d6f1c9"} Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.608120 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\
\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:06Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.627273 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"265e9cfe-6976-462e-b87f-9699e3a4c902\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89498b02a40a022c6a40707b5c5e4fa9c94e57d4c7a78ab9687c3e55b3860c26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf315b6a16cc730b3f5b6c98397a15b8542f4ea7812285c4f9aee06bab125942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f74d1ddba25f70ad4e5966802bf04babfc9035636f70bae24c2b97ebacb4bda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4992dfdb73bd089a847a14022b609cad34fef18469532862b7b943400c22794d\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4992dfdb73bd089a847a14022b609cad34fef18469532862b7b943400c22794d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:06Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.647444 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:06Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.660243 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.660285 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.660299 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.660324 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.660342 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:06Z","lastTransitionTime":"2025-12-03T08:41:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.669304 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7487354b977692f3cf1a8c2c9ede2bafac976404219c033a5c58f7695f039aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:06Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.676969 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.677183 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:41:06 crc kubenswrapper[4576]: E1203 08:41:06.677284 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:41:06 crc kubenswrapper[4576]: E1203 08:41:06.677493 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.682072 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:06Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.697425 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72bb738c-5ba1-4104-8729-1a929fa6d2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06ad4110fcb33e7b3cd2221f15d0b056660f9111f5ac3fd5f2633d4269ee553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q8kww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:06Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.709857 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6qf5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a93555e6-d807-4db3-85c0-843f309e6efa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69aea80b1478f26af6f4835e1599afdb41d2a619ca44bfbdd35b1cdf664a7a2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mplbz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6qf5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:06Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.722456 4576 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:06Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.736160 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff480bdd6b31fa53fe906aaa2459ccce17b507c4789f4ab6b64a8d2f286c9b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:06Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.750824 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jbxx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43112fdd73d3944e79a634803caefc0a68a10da3cdf3713c117ade99e3d6f1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87ab08c02e31fb934248fab8c55b94006e54110d6176cd571dbefd774980d9aa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:41:05Z\\\",\\\"message\\\":\\\"2025-12-03T08:40:20+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c4b65084-905c-448c-8be2-61fd5cb5a2c0\\\\n2025-12-03T08:40:20+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c4b65084-905c-448c-8be2-61fd5cb5a2c0 to /host/opt/cni/bin/\\\\n2025-12-03T08:40:20Z [verbose] multus-daemon started\\\\n2025-12-03T08:40:20Z [verbose] Readiness Indicator file check\\\\n2025-12-03T08:41:05Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:41:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9gm6g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jbxx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:06Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.763161 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.763201 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.763214 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.763227 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.763236 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:06Z","lastTransitionTime":"2025-12-03T08:41:06Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.767847 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60b1bede-26e9-4b5d-b450-9866da685693\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c6e7072fbdbb1574e02549ccd40206d728b36405b4c49b87e7f9fe11a8e0a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77fcbab4f8441fdeef4ff6f84326c1c5ab799faac0698120b8af9da36a524290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-pjb2d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:06Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.787869 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe34f07-1425-4b62-9eb0-70d1b197611c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\
\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://106766323b05884834bffc21cd45b77320e1bf62862139be3e9cfed8b004275e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:18Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ 
nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c62c5d08f556f98c9679a8d29c4b82580c823b78e36384aa4d66d39035b2fd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c62c5d08f556f98c9679a8d29c4b82580c823b78e36384aa4d66d39035b2fd0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:56Z\\\",\\\"message\\\":\\\"hift-etcd/etcd_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", 
\\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-etcd/etcd\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.253\\\\\\\", Port:2379, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.253\\\\\\\", Port:9979, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1203 08:40:56.253688 6179 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-5f9zh_openshift-ovn-kubernetes(cfe34f07-1425-4b62-9eb0-70d1b197611c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mount
Path\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5f9zh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:06Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.799749 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xrjlb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc47ce4e85ba7755ceabf5b864579aee5ea67ffcc13719ac1068f6c46a5a7d3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlwv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xrjlb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:06Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.813201 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pw7pk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c87be72e-a53b-42c9-bb32-f56cd0febe24\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvsfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvsfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:31Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pw7pk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:06Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.826987 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1bca0d4b-1e77-448b-b450-9dc02d46fa22\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72b5f4e5220da313486780f8e36e04729be9d90668c20543e75fa00b76d947cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://058cd98ce4a653c95efde10871cfaec6b4e1ac05c01a43c4b18d1d5a822e6793\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://058cd98ce4a653c95efde10871cfaec6b4e1ac05c01a43c4b18d1d5a822e6793\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:06Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.853431 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9942901-4be1-41db-b32c-9b996b72901f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88d9b58084ebfe06075e53fe77aec63e6c4e338aec6e1f31998d4bfcaca7ba77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac6d69cbfb86f4b717a66b46125d5d530049473a1f8b21e3be0d85c37ba5b837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3dc0695bda2cc75c2ccf82050ca47b54244a19319c741a2368e448f635ee3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fdc3c90dc45378285ef9d57dceea95152497f0
aefe12991a47354d11a0c838b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b49f6c1bad412759cfa18b56387ca168d26a69640f1c15c1b82040b73531bf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:06Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.866946 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.867025 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.867052 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.867082 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.867104 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:06Z","lastTransitionTime":"2025-12-03T08:41:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.871546 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90118b09-8d4e-4eb5-831f-56e13fa31009\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290805e286eda8aff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:06Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.886019 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3038dcea3c147a1d523c7cd5c6ec049a499b39dda4de06d42324e6e8998334f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52333a05f039ec84c87b0170524958d01fbb7688666f98dd54700a6398ce3683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:06Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.899755 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r47b6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba9d9c6-a657-4ac0-99e4-5ec5babb64ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d5b6a2b0818d8e906110415190bed49348e53d7d987987ce0aa699a7d46378b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bfkfz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://502bb671eef9e8e245ba86484a31e8009e90df1c55cc320e8b7bcf227cdd9ae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bfkfz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-r47b6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:06Z is after 2025-08-24T17:21:41Z" Dec 03 
08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.970281 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.970339 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.970348 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.970362 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:06 crc kubenswrapper[4576]: I1203 08:41:06.970372 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:06Z","lastTransitionTime":"2025-12-03T08:41:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.073087 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.073426 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.073567 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.073677 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.073821 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:07Z","lastTransitionTime":"2025-12-03T08:41:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.176456 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.176512 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.176570 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.176604 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.176632 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:07Z","lastTransitionTime":"2025-12-03T08:41:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.278984 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.279023 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.279032 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.279046 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.279055 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:07Z","lastTransitionTime":"2025-12-03T08:41:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.382634 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.382964 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.383064 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.383151 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.383258 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:07Z","lastTransitionTime":"2025-12-03T08:41:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.486288 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.486521 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.486628 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.486751 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.486836 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:07Z","lastTransitionTime":"2025-12-03T08:41:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.589706 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.589763 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.589779 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.589799 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.589814 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:07Z","lastTransitionTime":"2025-12-03T08:41:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.676577 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:41:07 crc kubenswrapper[4576]: E1203 08:41:07.676788 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.677108 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:41:07 crc kubenswrapper[4576]: E1203 08:41:07.677385 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.691632 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.691681 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.691697 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.691718 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.691729 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:07Z","lastTransitionTime":"2025-12-03T08:41:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.794917 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.794956 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.794967 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.794983 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.794993 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:07Z","lastTransitionTime":"2025-12-03T08:41:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.897880 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.897926 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.897988 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.898006 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:07 crc kubenswrapper[4576]: I1203 08:41:07.898015 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:07Z","lastTransitionTime":"2025-12-03T08:41:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.000498 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.000602 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.000621 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.000646 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.000663 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:08Z","lastTransitionTime":"2025-12-03T08:41:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.105469 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.105799 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.105934 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.106060 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.106246 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:08Z","lastTransitionTime":"2025-12-03T08:41:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.210072 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.210437 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.210608 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.210739 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.210877 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:08Z","lastTransitionTime":"2025-12-03T08:41:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.312873 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.313231 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.313447 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.313565 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.313665 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:08Z","lastTransitionTime":"2025-12-03T08:41:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.416592 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.416949 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.417093 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.417295 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.417435 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:08Z","lastTransitionTime":"2025-12-03T08:41:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.520964 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.521316 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.521586 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.521784 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.521984 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:08Z","lastTransitionTime":"2025-12-03T08:41:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.625285 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.626254 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.626498 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.626946 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.627156 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:08Z","lastTransitionTime":"2025-12-03T08:41:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.676896 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.676924 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:41:08 crc kubenswrapper[4576]: E1203 08:41:08.677020 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:41:08 crc kubenswrapper[4576]: E1203 08:41:08.677141 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.731311 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.731817 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.731962 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.732089 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.732221 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:08Z","lastTransitionTime":"2025-12-03T08:41:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.834924 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.834953 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.834964 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.834978 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.834987 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:08Z","lastTransitionTime":"2025-12-03T08:41:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.937564 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.937601 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.937613 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.937631 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:08 crc kubenswrapper[4576]: I1203 08:41:08.937646 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:08Z","lastTransitionTime":"2025-12-03T08:41:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.040075 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.040132 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.040149 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.040174 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.040192 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:09Z","lastTransitionTime":"2025-12-03T08:41:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.143395 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.143469 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.143492 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.143523 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.143588 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:09Z","lastTransitionTime":"2025-12-03T08:41:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.246425 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.246491 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.246509 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.246557 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.246577 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:09Z","lastTransitionTime":"2025-12-03T08:41:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.349870 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.350030 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.350060 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.350087 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.350118 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:09Z","lastTransitionTime":"2025-12-03T08:41:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.454622 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.454711 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.454730 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.454756 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.454776 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:09Z","lastTransitionTime":"2025-12-03T08:41:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.558261 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.558302 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.558315 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.558337 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.558353 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:09Z","lastTransitionTime":"2025-12-03T08:41:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.661513 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.661623 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.661650 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.661681 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.661704 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:09Z","lastTransitionTime":"2025-12-03T08:41:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.676751 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.676816 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:41:09 crc kubenswrapper[4576]: E1203 08:41:09.676917 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:41:09 crc kubenswrapper[4576]: E1203 08:41:09.677103 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.764998 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.765069 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.765102 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.765129 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.765153 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:09Z","lastTransitionTime":"2025-12-03T08:41:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.873136 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.873207 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.873226 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.873254 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.873286 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:09Z","lastTransitionTime":"2025-12-03T08:41:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.977395 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.977459 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.977477 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.977502 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:09 crc kubenswrapper[4576]: I1203 08:41:09.977520 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:09Z","lastTransitionTime":"2025-12-03T08:41:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.080496 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.080611 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.080636 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.080667 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.080690 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:10Z","lastTransitionTime":"2025-12-03T08:41:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.184970 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.185034 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.185051 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.185086 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.185104 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:10Z","lastTransitionTime":"2025-12-03T08:41:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.287863 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.288133 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.288200 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.288273 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.288449 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:10Z","lastTransitionTime":"2025-12-03T08:41:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.391506 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.391630 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.391655 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.391685 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.391708 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:10Z","lastTransitionTime":"2025-12-03T08:41:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.494273 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.494323 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.494336 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.494355 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.494366 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:10Z","lastTransitionTime":"2025-12-03T08:41:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.596738 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.597016 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.597095 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.597165 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.597236 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:10Z","lastTransitionTime":"2025-12-03T08:41:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.677102 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.677134 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:41:10 crc kubenswrapper[4576]: E1203 08:41:10.677323 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:41:10 crc kubenswrapper[4576]: E1203 08:41:10.677444 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.678708 4576 scope.go:117] "RemoveContainer" containerID="4c62c5d08f556f98c9679a8d29c4b82580c823b78e36384aa4d66d39035b2fd0" Dec 03 08:41:10 crc kubenswrapper[4576]: E1203 08:41:10.679033 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-5f9zh_openshift-ovn-kubernetes(cfe34f07-1425-4b62-9eb0-70d1b197611c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.699759 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.699820 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.699841 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.699866 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.699884 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:10Z","lastTransitionTime":"2025-12-03T08:41:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.803436 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.803486 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.803503 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.803567 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.803586 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:10Z","lastTransitionTime":"2025-12-03T08:41:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.907121 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.907206 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.907236 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.907254 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:10 crc kubenswrapper[4576]: I1203 08:41:10.907266 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:10Z","lastTransitionTime":"2025-12-03T08:41:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.011005 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.011056 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.011067 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.011087 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.011100 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:11Z","lastTransitionTime":"2025-12-03T08:41:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.113639 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.113672 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.113682 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.113695 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.113704 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:11Z","lastTransitionTime":"2025-12-03T08:41:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.216047 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.216086 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.216098 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.216118 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.216133 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:11Z","lastTransitionTime":"2025-12-03T08:41:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.318176 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.318464 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.318565 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.318662 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.318740 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:11Z","lastTransitionTime":"2025-12-03T08:41:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.421610 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.421686 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.421711 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.421734 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.421751 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:11Z","lastTransitionTime":"2025-12-03T08:41:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.524593 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.524629 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.524639 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.524654 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.524664 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:11Z","lastTransitionTime":"2025-12-03T08:41:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.626903 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.626940 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.626949 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.626962 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.626970 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:11Z","lastTransitionTime":"2025-12-03T08:41:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.676821 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:41:11 crc kubenswrapper[4576]: E1203 08:41:11.677025 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.677303 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:41:11 crc kubenswrapper[4576]: E1203 08:41:11.678073 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.730917 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.730970 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.730986 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.731009 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.731025 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:11Z","lastTransitionTime":"2025-12-03T08:41:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.834090 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.834166 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.834180 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.834198 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.834215 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:11Z","lastTransitionTime":"2025-12-03T08:41:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.918840 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.918946 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.918975 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.919052 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.919080 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:11Z","lastTransitionTime":"2025-12-03T08:41:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:11 crc kubenswrapper[4576]: E1203 08:41:11.941396 4576 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148060Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608860Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"86dcb870-937a-4231-a14c-053b8b425329\\\",\\\"systemUUID\\\":\\\"da5fb8e3-5a19-4f2d-831e-00b6f563dbea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:11Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.946840 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.946892 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.946912 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.946936 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.946955 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:11Z","lastTransitionTime":"2025-12-03T08:41:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:11 crc kubenswrapper[4576]: E1203 08:41:11.959826 4576 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148060Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608860Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"86dcb870-937a-4231-a14c-053b8b425329\\\",\\\"systemUUID\\\":\\\"da5fb8e3-5a19-4f2d-831e-00b6f563dbea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:11Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.964198 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.964242 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.964261 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.964284 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.964307 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:11Z","lastTransitionTime":"2025-12-03T08:41:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:11 crc kubenswrapper[4576]: E1203 08:41:11.980432 4576 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148060Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608860Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"86dcb870-937a-4231-a14c-053b8b425329\\\",\\\"systemUUID\\\":\\\"da5fb8e3-5a19-4f2d-831e-00b6f563dbea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:11Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.985521 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.985636 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.985658 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.985686 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:11 crc kubenswrapper[4576]: I1203 08:41:11.985704 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:11Z","lastTransitionTime":"2025-12-03T08:41:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:12 crc kubenswrapper[4576]: E1203 08:41:12.007972 4576 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148060Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608860Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"86dcb870-937a-4231-a14c-053b8b425329\\\",\\\"systemUUID\\\":\\\"da5fb8e3-5a19-4f2d-831e-00b6f563dbea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:12Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.013449 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.013517 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.013631 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.013665 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.013684 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:12Z","lastTransitionTime":"2025-12-03T08:41:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:12 crc kubenswrapper[4576]: E1203 08:41:12.034168 4576 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148060Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608860Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"86dcb870-937a-4231-a14c-053b8b425329\\\",\\\"systemUUID\\\":\\\"da5fb8e3-5a19-4f2d-831e-00b6f563dbea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:12Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:12 crc kubenswrapper[4576]: E1203 08:41:12.034384 4576 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.036592 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.036659 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.036685 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.036716 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.036740 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:12Z","lastTransitionTime":"2025-12-03T08:41:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.138841 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.139097 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.139210 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.139325 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.139493 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:12Z","lastTransitionTime":"2025-12-03T08:41:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.242692 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.242748 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.242825 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.242859 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.242883 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:12Z","lastTransitionTime":"2025-12-03T08:41:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.346062 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.346116 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.346173 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.346200 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.346218 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:12Z","lastTransitionTime":"2025-12-03T08:41:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.448992 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.449039 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.449063 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.449084 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.449098 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:12Z","lastTransitionTime":"2025-12-03T08:41:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.552101 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.552154 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.552178 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.552201 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.552215 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:12Z","lastTransitionTime":"2025-12-03T08:41:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.655985 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.656074 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.656093 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.656122 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.656141 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:12Z","lastTransitionTime":"2025-12-03T08:41:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.676700 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.676701 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:41:12 crc kubenswrapper[4576]: E1203 08:41:12.676867 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:41:12 crc kubenswrapper[4576]: E1203 08:41:12.676971 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.759868 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.759920 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.759940 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.759959 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.759972 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:12Z","lastTransitionTime":"2025-12-03T08:41:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.863074 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.863131 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.863144 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.863165 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.863212 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:12Z","lastTransitionTime":"2025-12-03T08:41:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.966344 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.966412 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.966429 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.966456 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:12 crc kubenswrapper[4576]: I1203 08:41:12.966481 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:12Z","lastTransitionTime":"2025-12-03T08:41:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.069380 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.069417 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.069430 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.069448 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.069459 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:13Z","lastTransitionTime":"2025-12-03T08:41:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.172398 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.172453 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.172468 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.172491 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.172503 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:13Z","lastTransitionTime":"2025-12-03T08:41:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.275138 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.275169 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.275177 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.275190 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.275202 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:13Z","lastTransitionTime":"2025-12-03T08:41:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.377125 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.377157 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.377255 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.377276 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.377286 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:13Z","lastTransitionTime":"2025-12-03T08:41:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.479342 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.479370 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.479379 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.479393 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.479401 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:13Z","lastTransitionTime":"2025-12-03T08:41:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.581422 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.581464 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.581475 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.581492 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.581501 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:13Z","lastTransitionTime":"2025-12-03T08:41:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.676911 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:41:13 crc kubenswrapper[4576]: E1203 08:41:13.677062 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.677310 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:41:13 crc kubenswrapper[4576]: E1203 08:41:13.677396 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.685035 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.685101 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.685114 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.685130 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.685140 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:13Z","lastTransitionTime":"2025-12-03T08:41:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.688722 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pw7pk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c87be72e-a53b-42c9-bb32-f56cd0febe24\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvsfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvsfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:31Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pw7pk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:13Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.701891 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:13Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.713039 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff480bdd6b31fa53fe906aaa2459ccce17b507c4789f4ab6b64a8d2f286c9b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:13Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.725856 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jbxx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43112fdd73d3944e79a634803caefc0a68a10da3cdf3713c117ade99e3d6f1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87ab08c02e31fb934248fab8c55b94006e54110d6176cd571dbefd774980d9aa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:41:05Z\\\",\\\"message\\\":\\\"2025-12-03T08:40:20+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c4b65084-905c-448c-8be2-61fd5cb5a2c0\\\\n2025-12-03T08:40:20+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c4b65084-905c-448c-8be2-61fd5cb5a2c0 to /host/opt/cni/bin/\\\\n2025-12-03T08:40:20Z [verbose] multus-daemon started\\\\n2025-12-03T08:40:20Z [verbose] Readiness Indicator file check\\\\n2025-12-03T08:41:05Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:41:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9gm6g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jbxx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:13Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.736957 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60b1bede-26e9-4b5d-b450-9866da685693\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c6e7072fbdbb1574e02549ccd40206d728b36405b4c49b87e7f9fe11a8e0a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77fcbab4f8441fdeef4ff6f84326c1c5ab799faac0698120b8af9da36a524290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pjb2d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:13Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.755114 4576 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe34f07-1425-4b62-9eb0-70d1b197611c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://106766323b05884834bffc21cd45b77320e1bf62862139be3e9cfed8b004275e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:18Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: 
/var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c62c5d08f556f98c9679a8d29c4b82580c823b78e36384aa4d66d39035b2fd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c62c5d08f556f98c9679a8d29c4b82580c823b78e36384aa4d66d39035b2fd0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:56Z\\\",\\\"message\\\":\\\"hift-etcd/etcd_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-etcd/etcd\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.253\\\\\\\", Port:2379, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.253\\\\\\\", Port:9979, Template:(*services.Template)(nil)}, 
Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1203 08:40:56.253688 6179 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-5f9zh_openshift-ovn-kubernetes(cfe34f07-1425-4b62-9eb0-70d1b197611c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\
":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5f9zh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:13Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.764665 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xrjlb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc47ce4e85ba7755ceabf5b864579aee5ea67ffcc13719ac1068f6c46a5a7d3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlwv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xrjlb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:13Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.776736 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1bca0d4b-1e77-448b-b450-9dc02d46fa22\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72b5f4e5220da313486780f8e36e04729be9d90668c20543e75fa00b76d947cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://058cd98ce4a653c95efde10871cfaec6b4e1ac05c01a43c4b18d1d5a822e6793\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://058cd98ce4a653c95efde10871cfaec6b4e1ac05c01a43c4b18d1d5a822e6793\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:13Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.787304 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.787351 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.787363 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.787409 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.787424 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:13Z","lastTransitionTime":"2025-12-03T08:41:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.799596 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9942901-4be1-41db-b32c-9b996b72901f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88d9b58084ebfe06075e53fe77aec63e6c4e338aec6e1f31998d4bfcaca7ba77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac6d69cbfb86f4b717a66b46125d5d530049473a1f8b21e3be0d85c37ba5b837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resourc
es\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3dc0695bda2cc75c2ccf82050ca47b54244a19319c741a2368e448f635ee3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fdc3c90dc45378285ef9d57dceea95152497f0aefe12991a47354d11a0c838b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b49f6c1bad412759cfa18b56387ca168d26a69640f1c15c1b82040b73531bf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Complet
ed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:13Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.814672 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90118b09-8d4e-4eb5-831f-56e13fa31009\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290805e286eda8aff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:13Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.827869 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3038dcea3c147a1d523c7cd5c6ec049a499b39dda4de06d42324e6e8998334f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52333a05f039ec84c87b0170524958d01fbb7688666f98dd54700a6398ce3683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:13Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.839403 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r47b6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba9d9c6-a657-4ac0-99e4-5ec5babb64ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d5b6a2b0818d8e906110415190bed49348e53d7d987987ce0aa699a7d46378b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bfkfz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://502bb671eef9e8e245ba86484a31e8009e90df1c55cc320e8b7bcf227cdd9ae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-de
v@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bfkfz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-r47b6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:13Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.855188 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\
\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 
secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:13Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.864746 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"265e9cfe-6976-462e-b87f-9699e3a4c902\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89498b02a40a022c6a40707b5c5e4fa9c94e57d4c7a78ab9687c3e55b3860c26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf315b6a16cc730b3f5b6c98397a15b8542f4ea7812285c4f9aee06bab125942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f74d1ddba25f70ad4e5966802bf04babfc9035636f70bae24c2b97ebacb4bda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4992dfdb73bd089a847a14022b609cad34fef18469532862b7b943400c22794d\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4992dfdb73bd089a847a14022b609cad34fef18469532862b7b943400c22794d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:13Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.875242 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:13Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.886783 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7487354b977692f3cf1a8c2c9ede2bafac976404219c033a5c58f7695f039aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:13Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.893494 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.893557 4576 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.893568 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.893583 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.893595 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:13Z","lastTransitionTime":"2025-12-03T08:41:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.903345 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:13Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.917867 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72bb738c-5ba1-4104-8729-1a929fa6d2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06ad4110fcb33e7b3cd2221f15d0b056660f9111f5ac3fd5f2633d4269ee553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q8kww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:13Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.927616 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6qf5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a93555e6-d807-4db3-85c0-843f309e6efa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69aea80b1478f26af6f4835e1599afdb41d2a619ca44bfbdd35b1cdf664a7a2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mplbz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6qf5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:13Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.996428 4576 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.996502 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.996521 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.996582 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:13 crc kubenswrapper[4576]: I1203 08:41:13.996603 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:13Z","lastTransitionTime":"2025-12-03T08:41:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.099776 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.099809 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.099816 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.099829 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.099838 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:14Z","lastTransitionTime":"2025-12-03T08:41:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.203007 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.203081 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.203099 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.203118 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.203131 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:14Z","lastTransitionTime":"2025-12-03T08:41:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.306011 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.306100 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.306116 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.306134 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.306146 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:14Z","lastTransitionTime":"2025-12-03T08:41:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.408983 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.409039 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.409057 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.409087 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.409105 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:14Z","lastTransitionTime":"2025-12-03T08:41:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.514103 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.514163 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.514182 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.514208 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.514227 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:14Z","lastTransitionTime":"2025-12-03T08:41:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.617690 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.618030 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.618133 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.618240 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.618328 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:14Z","lastTransitionTime":"2025-12-03T08:41:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.676697 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:41:14 crc kubenswrapper[4576]: E1203 08:41:14.677431 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.676780 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:41:14 crc kubenswrapper[4576]: E1203 08:41:14.677842 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.721599 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.721941 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.722101 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.722274 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.722729 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:14Z","lastTransitionTime":"2025-12-03T08:41:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.825654 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.825700 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.825717 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.825736 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.825750 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:14Z","lastTransitionTime":"2025-12-03T08:41:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.929629 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.930157 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.930382 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.930710 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:14 crc kubenswrapper[4576]: I1203 08:41:14.930894 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:14Z","lastTransitionTime":"2025-12-03T08:41:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.033660 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.033697 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.033707 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.033722 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.033733 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:15Z","lastTransitionTime":"2025-12-03T08:41:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.136979 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.137056 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.137078 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.137111 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.137133 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:15Z","lastTransitionTime":"2025-12-03T08:41:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.240424 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.240489 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.240513 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.240597 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.240621 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:15Z","lastTransitionTime":"2025-12-03T08:41:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.345333 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.345404 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.345430 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.345476 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.345500 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:15Z","lastTransitionTime":"2025-12-03T08:41:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.454447 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.454581 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.454602 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.454628 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.454647 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:15Z","lastTransitionTime":"2025-12-03T08:41:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.557825 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.557906 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.557932 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.557967 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.557990 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:15Z","lastTransitionTime":"2025-12-03T08:41:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.661503 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.661577 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.661588 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.661606 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.661617 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:15Z","lastTransitionTime":"2025-12-03T08:41:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.677210 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:41:15 crc kubenswrapper[4576]: E1203 08:41:15.677392 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.677514 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:41:15 crc kubenswrapper[4576]: E1203 08:41:15.678126 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.765292 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.765357 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.765376 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.765400 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.765425 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:15Z","lastTransitionTime":"2025-12-03T08:41:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.867846 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.867902 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.867920 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.867945 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.867962 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:15Z","lastTransitionTime":"2025-12-03T08:41:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.970435 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.970592 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.970615 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.970710 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:15 crc kubenswrapper[4576]: I1203 08:41:15.970844 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:15Z","lastTransitionTime":"2025-12-03T08:41:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.074672 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.074798 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.074820 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.074844 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.074861 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:16Z","lastTransitionTime":"2025-12-03T08:41:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.177752 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.177802 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.177813 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.177830 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.177842 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:16Z","lastTransitionTime":"2025-12-03T08:41:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.280712 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.280769 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.280779 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.280798 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.280807 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:16Z","lastTransitionTime":"2025-12-03T08:41:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.385117 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.385162 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.385178 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.385195 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.385208 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:16Z","lastTransitionTime":"2025-12-03T08:41:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.487769 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.487806 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.487814 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.487847 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.487858 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:16Z","lastTransitionTime":"2025-12-03T08:41:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.592002 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.592063 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.592079 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.592103 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.592120 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:16Z","lastTransitionTime":"2025-12-03T08:41:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.676824 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.676917 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:41:16 crc kubenswrapper[4576]: E1203 08:41:16.676953 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:41:16 crc kubenswrapper[4576]: E1203 08:41:16.677071 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.695122 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.695182 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.695194 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.695244 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.695263 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:16Z","lastTransitionTime":"2025-12-03T08:41:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.798187 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.798289 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.798308 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.798333 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.798351 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:16Z","lastTransitionTime":"2025-12-03T08:41:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.901245 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.901321 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.901346 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.901384 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:16 crc kubenswrapper[4576]: I1203 08:41:16.901409 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:16Z","lastTransitionTime":"2025-12-03T08:41:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.004298 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.004389 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.004426 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.004465 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.004489 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:17Z","lastTransitionTime":"2025-12-03T08:41:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.108166 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.108205 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.108217 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.108235 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.108246 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:17Z","lastTransitionTime":"2025-12-03T08:41:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.210837 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.210899 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.210917 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.210945 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.210962 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:17Z","lastTransitionTime":"2025-12-03T08:41:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.313624 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.313718 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.313796 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.313828 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.313843 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:17Z","lastTransitionTime":"2025-12-03T08:41:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.415917 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.415952 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.415960 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.415974 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.415985 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:17Z","lastTransitionTime":"2025-12-03T08:41:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.518718 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.518775 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.518788 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.518803 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.518815 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:17Z","lastTransitionTime":"2025-12-03T08:41:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.622986 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.623054 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.623074 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.623098 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.623111 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:17Z","lastTransitionTime":"2025-12-03T08:41:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.677253 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.677296 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:41:17 crc kubenswrapper[4576]: E1203 08:41:17.677560 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:41:17 crc kubenswrapper[4576]: E1203 08:41:17.677697 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.726415 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.726462 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.726478 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.726503 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.726520 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:17Z","lastTransitionTime":"2025-12-03T08:41:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.830937 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.831093 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.831131 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.831188 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.831211 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:17Z","lastTransitionTime":"2025-12-03T08:41:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.934428 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.934473 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.934485 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.934505 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:17 crc kubenswrapper[4576]: I1203 08:41:17.934519 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:17Z","lastTransitionTime":"2025-12-03T08:41:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.050020 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.050178 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.050221 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.050255 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.050288 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:18Z","lastTransitionTime":"2025-12-03T08:41:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.157922 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.157958 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.157995 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.158011 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.158022 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:18Z","lastTransitionTime":"2025-12-03T08:41:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.261311 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.261361 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.261370 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.261384 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.261393 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:18Z","lastTransitionTime":"2025-12-03T08:41:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.364914 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.364966 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.364985 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.365012 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.365033 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:18Z","lastTransitionTime":"2025-12-03T08:41:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.468871 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.468941 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.468964 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.468993 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.469018 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:18Z","lastTransitionTime":"2025-12-03T08:41:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.575683 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.575796 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.575867 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.575922 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.575989 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:18Z","lastTransitionTime":"2025-12-03T08:41:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.677592 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.677586 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:41:18 crc kubenswrapper[4576]: E1203 08:41:18.678956 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:41:18 crc kubenswrapper[4576]: E1203 08:41:18.679130 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.680185 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.680255 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.680277 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.680299 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.680317 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:18Z","lastTransitionTime":"2025-12-03T08:41:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.784336 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.784395 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.784418 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.784447 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.784471 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:18Z","lastTransitionTime":"2025-12-03T08:41:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.863269 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.863582 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:41:18 crc kubenswrapper[4576]: E1203 08:41:18.863604 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:22.863487738 +0000 UTC m=+150.249464752 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.863743 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.863823 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:41:18 crc kubenswrapper[4576]: E1203 08:41:18.863887 4576 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 08:41:18 crc kubenswrapper[4576]: E1203 08:41:18.863984 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 08:42:22.863971411 +0000 UTC m=+150.249948475 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 08:41:18 crc kubenswrapper[4576]: E1203 08:41:18.864034 4576 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.863893 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:41:18 crc kubenswrapper[4576]: E1203 08:41:18.864164 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 08:42:22.864131946 +0000 UTC m=+150.250108970 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 08:41:18 crc kubenswrapper[4576]: E1203 08:41:18.864296 4576 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 08:41:18 crc kubenswrapper[4576]: E1203 08:41:18.864330 4576 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 08:41:18 crc kubenswrapper[4576]: E1203 08:41:18.864355 4576 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 08:41:18 crc kubenswrapper[4576]: E1203 08:41:18.864397 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 08:42:22.864382052 +0000 UTC m=+150.250359086 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 08:41:18 crc kubenswrapper[4576]: E1203 08:41:18.864469 4576 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 08:41:18 crc kubenswrapper[4576]: E1203 08:41:18.864499 4576 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 08:41:18 crc kubenswrapper[4576]: E1203 08:41:18.864518 4576 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 08:41:18 crc kubenswrapper[4576]: E1203 08:41:18.864613 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 08:42:22.864596678 +0000 UTC m=+150.250573692 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.887810 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.887871 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.887887 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.887911 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.887929 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:18Z","lastTransitionTime":"2025-12-03T08:41:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.991027 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.991063 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.991072 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.991086 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:18 crc kubenswrapper[4576]: I1203 08:41:18.991116 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:18Z","lastTransitionTime":"2025-12-03T08:41:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.094229 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.094273 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.094283 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.094305 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.094316 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:19Z","lastTransitionTime":"2025-12-03T08:41:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.196907 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.196954 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.196970 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.196992 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.197021 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:19Z","lastTransitionTime":"2025-12-03T08:41:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.300329 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.300417 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.300437 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.300463 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.300482 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:19Z","lastTransitionTime":"2025-12-03T08:41:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.403776 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.403825 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.403838 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.403860 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.403874 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:19Z","lastTransitionTime":"2025-12-03T08:41:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.507069 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.507141 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.507160 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.507190 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.507208 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:19Z","lastTransitionTime":"2025-12-03T08:41:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.609496 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.609664 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.609697 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.609729 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.609752 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:19Z","lastTransitionTime":"2025-12-03T08:41:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.677246 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.677290 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:41:19 crc kubenswrapper[4576]: E1203 08:41:19.677395 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:41:19 crc kubenswrapper[4576]: E1203 08:41:19.677515 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.713050 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.713098 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.713114 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.713135 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.713153 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:19Z","lastTransitionTime":"2025-12-03T08:41:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.816003 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.816081 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.816099 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.816130 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.816147 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:19Z","lastTransitionTime":"2025-12-03T08:41:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.919332 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.919416 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.919440 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.919472 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:19 crc kubenswrapper[4576]: I1203 08:41:19.919493 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:19Z","lastTransitionTime":"2025-12-03T08:41:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.023720 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.023808 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.023829 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.023857 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.023880 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:20Z","lastTransitionTime":"2025-12-03T08:41:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.127966 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.128012 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.128026 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.128044 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.128059 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:20Z","lastTransitionTime":"2025-12-03T08:41:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.231210 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.231255 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.231263 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.231279 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.231295 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:20Z","lastTransitionTime":"2025-12-03T08:41:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.333932 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.333990 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.334003 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.334022 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.334034 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:20Z","lastTransitionTime":"2025-12-03T08:41:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.438184 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.438252 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.438271 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.438354 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.438380 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:20Z","lastTransitionTime":"2025-12-03T08:41:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.541984 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.542039 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.542052 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.542072 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.542089 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:20Z","lastTransitionTime":"2025-12-03T08:41:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.645512 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.645606 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.645630 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.645700 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.645747 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:20Z","lastTransitionTime":"2025-12-03T08:41:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.676280 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.676345 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:41:20 crc kubenswrapper[4576]: E1203 08:41:20.676398 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:41:20 crc kubenswrapper[4576]: E1203 08:41:20.676520 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.748833 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.748889 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.748904 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.748926 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.748940 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:20Z","lastTransitionTime":"2025-12-03T08:41:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.851734 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.851779 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.851791 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.851814 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.851827 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:20Z","lastTransitionTime":"2025-12-03T08:41:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.954847 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.954931 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.954954 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.954986 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:20 crc kubenswrapper[4576]: I1203 08:41:20.955011 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:20Z","lastTransitionTime":"2025-12-03T08:41:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.058110 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.058166 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.058179 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.058198 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.058213 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:21Z","lastTransitionTime":"2025-12-03T08:41:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.160605 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.160644 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.160654 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.160668 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.160678 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:21Z","lastTransitionTime":"2025-12-03T08:41:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.262987 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.263041 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.263057 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.263079 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.263094 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:21Z","lastTransitionTime":"2025-12-03T08:41:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.372612 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.372659 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.372668 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.372686 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.372697 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:21Z","lastTransitionTime":"2025-12-03T08:41:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.475904 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.476178 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.476189 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.476211 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.476223 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:21Z","lastTransitionTime":"2025-12-03T08:41:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.579260 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.579309 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.579321 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.579335 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.579344 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:21Z","lastTransitionTime":"2025-12-03T08:41:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.677021 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.677126 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:41:21 crc kubenswrapper[4576]: E1203 08:41:21.677287 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:41:21 crc kubenswrapper[4576]: E1203 08:41:21.677409 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.681623 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.681660 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.681671 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.681686 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.681697 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:21Z","lastTransitionTime":"2025-12-03T08:41:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.785380 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.785454 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.785468 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.785488 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.785500 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:21Z","lastTransitionTime":"2025-12-03T08:41:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.887787 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.887837 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.887850 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.887872 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.887884 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:21Z","lastTransitionTime":"2025-12-03T08:41:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.990836 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.990864 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.990873 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.990888 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:21 crc kubenswrapper[4576]: I1203 08:41:21.990896 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:21Z","lastTransitionTime":"2025-12-03T08:41:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.094430 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.094472 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.094482 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.094501 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.094511 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:22Z","lastTransitionTime":"2025-12-03T08:41:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.197200 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.197237 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.197246 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.197262 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.197272 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:22Z","lastTransitionTime":"2025-12-03T08:41:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.271609 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.271852 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.271928 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.271967 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.271990 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:22Z","lastTransitionTime":"2025-12-03T08:41:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:22 crc kubenswrapper[4576]: E1203 08:41:22.292223 4576 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148060Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608860Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"86dcb870-937a-4231-a14c-053b8b425329\\\",\\\"systemUUID\\\":\\\"da5fb8e3-5a19-4f2d-831e-00b6f563dbea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:22Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.298567 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.298603 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.298634 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.298655 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.298666 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:22Z","lastTransitionTime":"2025-12-03T08:41:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:22 crc kubenswrapper[4576]: E1203 08:41:22.319382 4576 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148060Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608860Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"86dcb870-937a-4231-a14c-053b8b425329\\\",\\\"systemUUID\\\":\\\"da5fb8e3-5a19-4f2d-831e-00b6f563dbea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:22Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.323965 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.323994 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.324005 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.324021 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.324033 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:22Z","lastTransitionTime":"2025-12-03T08:41:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:22 crc kubenswrapper[4576]: E1203 08:41:22.338303 4576 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148060Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608860Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"86dcb870-937a-4231-a14c-053b8b425329\\\",\\\"systemUUID\\\":\\\"da5fb8e3-5a19-4f2d-831e-00b6f563dbea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:22Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.343716 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.343781 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.343795 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.343819 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.343832 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:22Z","lastTransitionTime":"2025-12-03T08:41:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:22 crc kubenswrapper[4576]: E1203 08:41:22.363376 4576 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148060Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608860Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"86dcb870-937a-4231-a14c-053b8b425329\\\",\\\"systemUUID\\\":\\\"da5fb8e3-5a19-4f2d-831e-00b6f563dbea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:22Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.372740 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.372808 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.372821 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.372838 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.372850 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:22Z","lastTransitionTime":"2025-12-03T08:41:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:22 crc kubenswrapper[4576]: E1203 08:41:22.399961 4576 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148060Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608860Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"86dcb870-937a-4231-a14c-053b8b425329\\\",\\\"systemUUID\\\":\\\"da5fb8e3-5a19-4f2d-831e-00b6f563dbea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:22Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:22 crc kubenswrapper[4576]: E1203 08:41:22.400171 4576 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.402570 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.402827 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.402984 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.403129 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.403258 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:22Z","lastTransitionTime":"2025-12-03T08:41:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.506915 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.506972 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.506986 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.507005 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.507017 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:22Z","lastTransitionTime":"2025-12-03T08:41:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.609997 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.610430 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.610594 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.610829 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.610939 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:22Z","lastTransitionTime":"2025-12-03T08:41:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.676696 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.676701 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:41:22 crc kubenswrapper[4576]: E1203 08:41:22.676897 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:41:22 crc kubenswrapper[4576]: E1203 08:41:22.677073 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.678841 4576 scope.go:117] "RemoveContainer" containerID="4c62c5d08f556f98c9679a8d29c4b82580c823b78e36384aa4d66d39035b2fd0" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.713769 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.713825 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.713842 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.713869 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.713887 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:22Z","lastTransitionTime":"2025-12-03T08:41:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.817686 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.817718 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.817726 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.817740 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.817750 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:22Z","lastTransitionTime":"2025-12-03T08:41:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.920562 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.920606 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.920621 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.920646 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:22 crc kubenswrapper[4576]: I1203 08:41:22.920659 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:22Z","lastTransitionTime":"2025-12-03T08:41:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.035950 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.036025 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.036050 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.036085 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.036108 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:23Z","lastTransitionTime":"2025-12-03T08:41:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.138400 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.138436 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.138447 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.138461 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.138470 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:23Z","lastTransitionTime":"2025-12-03T08:41:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.259142 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.259176 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.259186 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.259200 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.259210 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:23Z","lastTransitionTime":"2025-12-03T08:41:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.368714 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.368746 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.368755 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.368767 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.368777 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:23Z","lastTransitionTime":"2025-12-03T08:41:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.471568 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.471617 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.471628 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.471649 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.471662 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:23Z","lastTransitionTime":"2025-12-03T08:41:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.578985 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.579030 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.579044 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.579060 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.579072 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:23Z","lastTransitionTime":"2025-12-03T08:41:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.669012 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5f9zh_cfe34f07-1425-4b62-9eb0-70d1b197611c/ovnkube-controller/2.log" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.672654 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5f9zh_cfe34f07-1425-4b62-9eb0-70d1b197611c/ovn-acl-logging/0.log" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.673509 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" event={"ID":"cfe34f07-1425-4b62-9eb0-70d1b197611c","Type":"ContainerStarted","Data":"3e93a2db7560906496e9579df87c1f307dbd99c51fa36b08d85dc762c02b531c"} Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.674042 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.676373 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.676409 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:41:23 crc kubenswrapper[4576]: E1203 08:41:23.676478 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:41:23 crc kubenswrapper[4576]: E1203 08:41:23.676967 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.685446 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.685520 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.685593 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.685626 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.685648 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:23Z","lastTransitionTime":"2025-12-03T08:41:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.691846 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1bca0d4b-1e77-448b-b450-9dc02d46fa22\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72b5f4e5220da313486780f8e36e04729be9d90668c20543e75fa00b76d947cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://058cd98ce4a653c95efde10871cfaec6b4e1ac05c01a43c4b18d1d5a822e6793\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://058cd98ce4a653c95efde10871cfaec6b4e1ac05c01a43c4b18d1d5a822e6793\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:23Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.712799 4576 
status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9942901-4be1-41db-b32c-9b996b72901f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88d9b58084ebfe06075e53fe77aec63e6c4e338aec6e1f31998d4bfcaca7ba77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac6d69cbfb86f4b717a66b46125d5d530049473a1f8b21e3be0d85c37ba5b837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3dc0695bda2cc75c2ccf82050ca47b54244a19319c741a2368e448f635ee3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-
certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fdc3c90dc45378285ef9d57dceea95152497f0aefe12991a47354d11a0c838b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b49f6c1bad412759cfa18b56387ca168d26a69640f1c15c1b82040b73531bf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:
39:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:23Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.726976 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90118b09-8d4e-4eb5-831f-56e13fa31009\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290805e286eda8aff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:23Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.840869 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3038dcea3c147a1d523c7cd5c6ec049a499b39dda4de06d42324e6e8998334f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52333a05f039ec84c87b0170524958d01fbb7688666f98dd54700a6398ce3683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:23Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.845321 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.845362 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.845373 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.845425 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.845436 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:23Z","lastTransitionTime":"2025-12-03T08:41:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.859219 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r47b6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba9d9c6-a657-4ac0-99e4-5ec5babb64ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d5b6a2b0818d8e906110415190bed49348e53d7d987987ce0aa699a7d46378b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bfkfz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://502bb671eef9e8e245ba86484a31e8009e90df1c55cc320e8b7bcf227cdd9ae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bfkfz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-r47b6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:23Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.888126 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:23Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.905100 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"265e9cfe-6976-462e-b87f-9699e3a4c902\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89498b02a40a022c6a40707b5c5e4fa9c94e57d4c7a78ab9687c3e55b3860c26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf315b6a16cc730b3f5b6c98397a15b8542f4ea7812285c4f9aee06bab125942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f74d1ddba25f70ad4e5966802bf04babfc9035636f70bae24c2b97ebacb4bda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4992dfdb73bd089a847a14022b609cad34fef18469532862b7b943400c22794d\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4992dfdb73bd089a847a14022b609cad34fef18469532862b7b943400c22794d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:23Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.917274 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:23Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.928558 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7487354b977692f3cf1a8c2c9ede2bafac976404219c033a5c58f7695f039aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:23Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.940430 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:23Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.947434 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.947471 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.947482 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.947497 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.947507 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:23Z","lastTransitionTime":"2025-12-03T08:41:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.962702 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72bb738c-5ba1-4104-8729-1a929fa6d2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06ad4110fcb33e7b3cd2221f15d0b056660f9111f5ac3fd5f2633d4269ee553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q8kww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:23Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.980196 4576 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-dns/node-resolver-6qf5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a93555e6-d807-4db3-85c0-843f309e6efa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69aea80b1478f26af6f4835e1599afdb41d2a619ca44bfbdd35b1cdf664a7a2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mplbz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6qf5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:23Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:23 crc kubenswrapper[4576]: I1203 08:41:23.997733 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xrjlb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc47ce4e85ba7755ceabf5b864579aee5ea67ffcc13719ac1068f6c46a5a7d3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlwv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xrjlb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:23Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.009275 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pw7pk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c87be72e-a53b-42c9-bb32-f56cd0febe24\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvsfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvsfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:31Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pw7pk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.019887 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.029345 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff480bdd6b31fa53fe906aaa2459ccce17b507c4789f4ab6b64a8d2f286c9b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.044611 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jbxx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43112fdd73d3944e79a634803caefc0a68a10da3cdf3713c117ade99e3d6f1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87ab08c02e31fb934248fab8c55b94006e54110d6176cd571dbefd774980d9aa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:41:05Z\\\",\\\"message\\\":\\\"2025-12-03T08:40:20+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c4b65084-905c-448c-8be2-61fd5cb5a2c0\\\\n2025-12-03T08:40:20+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c4b65084-905c-448c-8be2-61fd5cb5a2c0 to /host/opt/cni/bin/\\\\n2025-12-03T08:40:20Z [verbose] multus-daemon started\\\\n2025-12-03T08:40:20Z [verbose] Readiness Indicator file check\\\\n2025-12-03T08:41:05Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:41:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9gm6g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jbxx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.050115 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.050147 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.050157 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.050173 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.050184 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:24Z","lastTransitionTime":"2025-12-03T08:41:24Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.055585 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60b1bede-26e9-4b5d-b450-9866da685693\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c6e7072fbdbb1574e02549ccd40206d728b36405b4c49b87e7f9fe11a8e0a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77fcbab4f8441fdeef4ff6f84326c1c5ab799faac0698120b8af9da36a524290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-pjb2d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.075123 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe34f07-1425-4b62-9eb0-70d1b197611c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\
\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://106766323b05884834bffc21cd45b77320e1bf62862139be3e9cfed8b004275e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:18Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ 
nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e93a2db7560906496e9579df87c1f307dbd99c51fa36b08d85dc762c02b531c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c62c5d08f556f98c9679a8d29c4b82580c823b78e36384aa4d66d39035b2fd0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:56Z\\\",\\\"message\\\":\\\"hift-etcd/etcd_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", 
\\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-etcd/etcd\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.253\\\\\\\", Port:2379, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.253\\\\\\\", Port:9979, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1203 08:40:56.253688 6179 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:41:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-
jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5f9zh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.087443 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"265e9cfe-6976-462e-b87f-9699e3a4c902\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89498b02a40a022c6a40707b5c5e4fa9c94e57d4c7a78ab9687c3e55b3860c26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf315b6a16cc730b3f5b6c98397a15b8542f4ea7812285c4f9aee06bab125942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f74d1ddba25f70ad4e5966802bf04babfc9035636f70bae24c2b97ebacb4bda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4992dfdb73bd089a847a14022b609cad34fef18469532862b7b943400c22794d\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4992dfdb73bd089a847a14022b609cad34fef18469532862b7b943400c22794d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.153813 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.153852 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.153861 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.153876 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.153885 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:24Z","lastTransitionTime":"2025-12-03T08:41:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.157814 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.176297 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.188219 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.203153 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"72bb738c-5ba1-4104-8729-1a929fa6d2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06ad4110fcb33e7b3cd2221f15d0b056660f9111f5ac3fd5f2633d4269ee553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q8kww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.213236 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6qf5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a93555e6-d807-4db3-85c0-843f309e6efa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69aea80b1478f26af6f4835e1599afdb41d2a619ca44bfbdd35b1cdf664a7a2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mplbz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6qf5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.227827 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7487354b977692f3cf1a8c2c9ede2bafac976404219c033a5c58f7695f039aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.239005 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff480bdd6b31fa53fe906aaa2459ccce17b507c4789f4ab6b64a8d2f286c9b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.253656 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jbxx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43112fdd73d3944e79a634803caefc0a68a10da3cdf3713c117ade99e3d6f1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87ab08c02e31fb934248fab8c55b94006e54110d6176cd571dbefd774980d9aa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:41:05Z\\\",\\\"message\\\":\\\"2025-12-03T08:40:20+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c4b65084-905c-448c-8be2-61fd5cb5a2c0\\\\n2025-12-03T08:40:20+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c4b65084-905c-448c-8be2-61fd5cb5a2c0 to /host/opt/cni/bin/\\\\n2025-12-03T08:40:20Z [verbose] multus-daemon started\\\\n2025-12-03T08:40:20Z [verbose] Readiness Indicator file check\\\\n2025-12-03T08:41:05Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:41:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9gm6g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jbxx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.255452 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.255489 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.255500 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.255516 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.255541 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:24Z","lastTransitionTime":"2025-12-03T08:41:24Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.265932 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60b1bede-26e9-4b5d-b450-9866da685693\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c6e7072fbdbb1574e02549ccd40206d728b36405b4c49b87e7f9fe11a8e0a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77fcbab4f8441fdeef4ff6f84326c1c5ab799faac0698120b8af9da36a524290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-pjb2d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.286553 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe34f07-1425-4b62-9eb0-70d1b197611c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\
\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://106766323b05884834bffc21cd45b77320e1bf62862139be3e9cfed8b004275e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:18Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ 
nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e93a2db7560906496e9579df87c1f307dbd99c51fa36b08d85dc762c02b531c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c62c5d08f556f98c9679a8d29c4b82580c823b78e36384aa4d66d39035b2fd0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:56Z\\\",\\\"message\\\":\\\"hift-etcd/etcd_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", 
\\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-etcd/etcd\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.253\\\\\\\", Port:2379, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.253\\\\\\\", Port:9979, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1203 08:40:56.253688 6179 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:41:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-
jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5f9zh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.302192 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xrjlb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc47ce4e85ba7755ceabf5b864579aee5ea67ffcc13719ac1068f6c46a5a7d3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlwv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xrjlb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.312686 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pw7pk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c87be72e-a53b-42c9-bb32-f56cd0febe24\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvsfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvsfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:31Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pw7pk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.324678 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.344757 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9942901-4be1-41db-b32c-9b996b72901f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88d9b58084ebfe06075e53fe77aec63e6c4e338aec6e1f31998d4bfcaca7ba77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\
\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac6d69cbfb86f4b717a66b46125d5d530049473a1f8b21e3be0d85c37ba5b837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3dc0695bda2cc75c2ccf82050ca47b54244a19319c741a2368e448f635ee3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fdc3c90dc45378285ef9d57dceea95152497f0aefe12991a47354d11a0c838b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b49f6c1bad412759cfa18b56387ca168d26a69640f1c15c1b82040b73531bf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cr
i-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.379960 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90118b09-8d4e-4eb5-831f-56e13fa31009\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290805e286eda8aff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.380012 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.380163 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.380177 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.380191 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.380202 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:24Z","lastTransitionTime":"2025-12-03T08:41:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.395443 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3038dcea3c147a1d523c7cd5c6ec049a499b39dda4de06d42324e6e8998334f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52333a05f039ec84c87b0170524958d01fbb7688666f98dd54700a6398ce3683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.408968 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r47b6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba9d9c6-a657-4ac0-99e4-5ec5babb64ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d5b6a2b0818d8e906110415190bed49348e53d7d987987ce0aa699a7d46378b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bfkfz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://502bb671eef9e8e245ba86484a31e8009e90df1c55cc320e8b7bcf227cdd9ae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bfkfz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-r47b6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:24Z is after 2025-08-24T17:21:41Z" Dec 03 
08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.422313 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1bca0d4b-1e77-448b-b450-9dc02d46fa22\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72b5f4e5220da313486780f8e36e04729be9d90668c20543e75fa00b76d947cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://058cd98ce4a653c95efde10871cfaec6b4e1ac05c01a43c4b18d1d5a822e6793\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://058cd98ce4a653c95efde10871cfaec6b4e1ac05c01a43c4b18d1d5a822e6793\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:24Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.483047 4576 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.483088 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.483099 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.483116 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.483130 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:24Z","lastTransitionTime":"2025-12-03T08:41:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.585284 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.585322 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.585333 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.585369 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.585381 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:24Z","lastTransitionTime":"2025-12-03T08:41:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.676127 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.676180 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:41:24 crc kubenswrapper[4576]: E1203 08:41:24.676243 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:41:24 crc kubenswrapper[4576]: E1203 08:41:24.676313 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.688021 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.688067 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.688077 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.688091 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.688102 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:24Z","lastTransitionTime":"2025-12-03T08:41:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.791471 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.791609 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.791628 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.791655 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.791673 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:24Z","lastTransitionTime":"2025-12-03T08:41:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.895373 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.895452 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.895470 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.895497 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.895516 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:24Z","lastTransitionTime":"2025-12-03T08:41:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.998929 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.998993 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.999012 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.999037 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:24 crc kubenswrapper[4576]: I1203 08:41:24.999055 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:24Z","lastTransitionTime":"2025-12-03T08:41:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.101760 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.101789 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.101798 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.101812 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.101820 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:25Z","lastTransitionTime":"2025-12-03T08:41:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.204499 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.204585 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.204598 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.204619 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.204635 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:25Z","lastTransitionTime":"2025-12-03T08:41:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.306931 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.307166 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.307255 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.307361 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.307516 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:25Z","lastTransitionTime":"2025-12-03T08:41:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.410706 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.411127 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.411347 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.411626 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.411895 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:25Z","lastTransitionTime":"2025-12-03T08:41:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.514977 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.515100 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.515114 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.515130 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.515141 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:25Z","lastTransitionTime":"2025-12-03T08:41:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.618032 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.618077 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.618090 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.618122 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.618140 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:25Z","lastTransitionTime":"2025-12-03T08:41:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.676383 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.676760 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:41:25 crc kubenswrapper[4576]: E1203 08:41:25.676891 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:41:25 crc kubenswrapper[4576]: E1203 08:41:25.677019 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.685653 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5f9zh_cfe34f07-1425-4b62-9eb0-70d1b197611c/ovnkube-controller/3.log" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.687185 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5f9zh_cfe34f07-1425-4b62-9eb0-70d1b197611c/ovnkube-controller/2.log" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.691681 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5f9zh_cfe34f07-1425-4b62-9eb0-70d1b197611c/ovn-acl-logging/0.log" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.693080 4576 generic.go:334] "Generic (PLEG): container finished" podID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerID="3e93a2db7560906496e9579df87c1f307dbd99c51fa36b08d85dc762c02b531c" exitCode=1 Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.693106 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" event={"ID":"cfe34f07-1425-4b62-9eb0-70d1b197611c","Type":"ContainerDied","Data":"3e93a2db7560906496e9579df87c1f307dbd99c51fa36b08d85dc762c02b531c"} Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.693239 4576 scope.go:117] "RemoveContainer" containerID="4c62c5d08f556f98c9679a8d29c4b82580c823b78e36384aa4d66d39035b2fd0" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.695452 4576 scope.go:117] "RemoveContainer" containerID="3e93a2db7560906496e9579df87c1f307dbd99c51fa36b08d85dc762c02b531c" Dec 03 08:41:25 crc kubenswrapper[4576]: E1203 08:41:25.695819 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-5f9zh_openshift-ovn-kubernetes(cfe34f07-1425-4b62-9eb0-70d1b197611c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.712018 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1bca0d4b-1e77-448b-b450-9dc02d46fa22\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72b5f4e5220da313486780f8e36e04729be9d90668c20543e75fa00b76d947cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://058cd98ce4a653c95efde10871cfaec6b4e1ac05c01a43c4b18d1d5a822e6793\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://058cd98ce4a653c95efde10871cfaec6b4e1ac05c01a43c4b18d1d5a822e6793\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.721683 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.721753 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.721780 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.721950 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.722017 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:25Z","lastTransitionTime":"2025-12-03T08:41:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.747747 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9942901-4be1-41db-b32c-9b996b72901f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88d9b58084ebfe06075e53fe77aec63e6c4e338aec6e1f31998d4bfcaca7ba77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac6d69cbfb86f4b717a66b46125d5d530049473a1f8b21e3be0d85c37ba5b837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resourc
es\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3dc0695bda2cc75c2ccf82050ca47b54244a19319c741a2368e448f635ee3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fdc3c90dc45378285ef9d57dceea95152497f0aefe12991a47354d11a0c838b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b49f6c1bad412759cfa18b56387ca168d26a69640f1c15c1b82040b73531bf3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88e3df2e48984d117287b6b160f874f0e4c9777b2c8480d3843f9c0ad4fb500f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Complet
ed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://deff88e8d49bac1916d4f7a35230ab2df886d25a792c8120577a787b865673c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebca6254314d23414686717165abad39a28f467dd3a0265deb0ba884e860ddf9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.764393 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90118b09-8d4e-4eb5-831f-56e13fa31009\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e8249d9d3f0b3fa875a28bfabfe8451d5ceb03b3b5f5c93d4cf13a9ddb36da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7786549693d2464a701d6b3662385b9ba546302a82aae6a62e8792d937c208c4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a717b57cd90a9c90b38f1276fbee44f7f57a848fa91334d290805e286eda8aff\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.775316 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3038dcea3c147a1d523c7cd5c6ec049a499b39dda4de06d42324e6e8998334f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52333a05f039ec84c87b0170524958d01fbb7688666f98dd54700a6398ce3683\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.785104 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r47b6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba9d9c6-a657-4ac0-99e4-5ec5babb64ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d5b6a2b0818d8e906110415190bed49348e53d7d987987ce0aa699a7d46378b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bfkfz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://502bb671eef9e8e245ba86484a31e8009e90df1c55cc320e8b7bcf227cdd9ae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-de
v@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bfkfz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-r47b6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.797737 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5926f9c-3fa0-4df6-93a7-70f581151e0e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\
\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1203 08:40:07.444275 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 08:40:07.445697 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1959907056/tls.crt::/tmp/serving-cert-1959907056/tls.key\\\\\\\"\\\\nI1203 08:40:14.195821 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 08:40:14.197999 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 08:40:14.198027 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 08:40:14.198051 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 08:40:14.198058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 08:40:14.205785 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 08:40:14.205809 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205813 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 08:40:14.205816 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 08:40:14.205819 1 
secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 08:40:14.205821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 08:40:14.205824 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 08:40:14.205954 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 08:40:14.210496 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.809035 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"265e9cfe-6976-462e-b87f-9699e3a4c902\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:39:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89498b02a40a022c6a40707b5c5e4fa9c94e57d4c7a78ab9687c3e55b3860c26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf315b6a16cc730b3f5b6c98397a15b8542f4ea7812285c4f9aee06bab125942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f74d1ddba25f70ad4e5966802bf04babfc9035636f70bae24c2b97ebacb4bda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:39:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4992dfdb73bd089a847a14022b609cad34fef18469532862b7b943400c22794d\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4992dfdb73bd089a847a14022b609cad34fef18469532862b7b943400c22794d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:39:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:39:54Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:39:53Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.819627 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.824157 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.824201 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.824210 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.824227 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.824238 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:25Z","lastTransitionTime":"2025-12-03T08:41:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.832323 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7487354b977692f3cf1a8c2c9ede2bafac976404219c033a5c58f7695f039aa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.845401 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.862058 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q8kww" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72bb738c-5ba1-4104-8729-1a929fa6d2ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06ad4110fcb33e7b3cd2221f15d0b056660f9111f5ac3fd5f2633d4269ee553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"container
ID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d43a44b901d6f738f10a71a0055c8773bb80b1a86f3d2b956658dfaea8ace52c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://876f48d7f74ede4476d707c6f571f7dcfd007784c85cfa40c1cd169818675797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://718538b6e97ac06e8d922f4631385bb9c07a74828a68aed244b345060461f8da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29b94a3cc5682daecb70c8742dbb32925504b181e7f5edaf52b4a9b78957676b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af72d0878090767f73a2b559179845ca64a2a9b30fa7cb2f6a0b0c3758cc6d4c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true
,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f62402de71677b41b19c9edfc96b4ae3d00e42daa1e7832be04d2774ee62c4d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-phk44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q8kww\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.874104 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6qf5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a93555e6-d807-4db3-85c0-843f309e6efa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69aea80b1478f26af6f4835e1599afdb41d2a619ca44bfbdd35b1cdf664a7a2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mplbz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-6qf5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.886216 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.898838 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff480bdd6b31fa53fe906aaa2459ccce17b507c4789f4ab6b64a8d2f286c9b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.910556 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jbxx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2b7eac2-6611-49d0-9da5-f97a3ccc9529\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43112fdd73d3944e79a634803caefc0a68a10da3cdf3713c117ade99e3d6f1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87ab08c02e31fb934248fab8c55b94006e54110d6176cd571dbefd774980d9aa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:41:05Z\\\",\\\"message\\\":\\\"2025-12-03T08:40:20+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c4b65084-905c-448c-8be2-61fd5cb5a2c0\\\\n2025-12-03T08:40:20+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c4b65084-905c-448c-8be2-61fd5cb5a2c0 to /host/opt/cni/bin/\\\\n2025-12-03T08:40:20Z [verbose] multus-daemon started\\\\n2025-12-03T08:40:20Z [verbose] Readiness Indicator file check\\\\n2025-12-03T08:41:05Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:41:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9gm6g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jbxx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.923520 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60b1bede-26e9-4b5d-b450-9866da685693\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c6e7072fbdbb1574e02549ccd40206d728b36405b4c49b87e7f9fe11a8e0a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77fcbab4f8441fdeef4ff6f84326c1c5ab799faac0698120b8af9da36a524290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrrgd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pjb2d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.927383 4576 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.927415 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.927424 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.927437 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.927446 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:25Z","lastTransitionTime":"2025-12-03T08:41:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.943549 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfe34f07-1425-4b62-9eb0-70d1b197611c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://106766323b05884834bffc21cd45b77320e1bf62862139be3e9cfed8b004275e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:18Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e93a2db7560906496e9579df87c1f307dbd99c51fa36b08d85dc762c02b531c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c62c5d08f556f98c9679a8d29c4b82580c823b78e36384aa4d66d39035b2fd0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:40:56Z\\\",\\\"message\\\":\\\"hift-etcd/etcd_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-etcd/etcd\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.253\\\\\\\", Port:2379, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.253\\\\\\\", Port:9979, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), 
Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1203 08:40:56.253688 6179 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e93a2db7560906496e9579df87c1f307dbd99c51fa36b08d85dc762c02b531c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T08:41:24Z\\\",\\\"message\\\":\\\"} vips:{GoMap:map[10.217.5.37:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {88e20c31-5b8d-4d44-bbd8-dba87b7dbaf0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 08:41:24.573762 6521 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-apiserver/api]} name:Service_openshift-apiserver/api_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.37:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {88e20c31-5b8d-4d44-bbd8-dba87b7dbaf0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 08:41:24.573971 6521 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-apiserver-operator/metrics\\\\\\\"}\\\\nI1203 08:41:24.574008 6521 services_controller.go:360] Finished syncing service metrics on namespace openshift-apiserver-operator for network=default : 68.071463ms\\\\nF1203 08:41:24.569824 6521 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T08:41:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T08:40:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T08:40:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jqbqk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5f9zh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.953716 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-xrjlb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d804fbc-4f34-4fa5-9c67-fa7f3d8208ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc47ce4e85ba7755ceabf5b864579aee5ea67ffcc13719ac1068f6c46a5a7d3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T08:40:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlwv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.16
8.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-xrjlb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:25 crc kubenswrapper[4576]: I1203 08:41:25.965484 4576 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pw7pk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c87be72e-a53b-42c9-bb32-f56cd0febe24\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T08:40:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvsfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvsfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T08:40:31Z\\\"}}\" for pod 
\"openshift-multus\"/\"network-metrics-daemon-pw7pk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:25Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.030092 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.030149 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.030159 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.030171 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.030181 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:26Z","lastTransitionTime":"2025-12-03T08:41:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.133992 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.134115 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.134176 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.134211 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.134277 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:26Z","lastTransitionTime":"2025-12-03T08:41:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.237066 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.237107 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.237117 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.237146 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.237155 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:26Z","lastTransitionTime":"2025-12-03T08:41:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.339984 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.340037 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.340049 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.340067 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.340082 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:26Z","lastTransitionTime":"2025-12-03T08:41:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.443001 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.443041 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.443051 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.443067 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.443082 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:26Z","lastTransitionTime":"2025-12-03T08:41:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.546650 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.546696 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.546705 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.546721 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.546731 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:26Z","lastTransitionTime":"2025-12-03T08:41:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.649804 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.649849 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.649860 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.649879 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.649894 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:26Z","lastTransitionTime":"2025-12-03T08:41:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.676340 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.676355 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:41:26 crc kubenswrapper[4576]: E1203 08:41:26.676480 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:41:26 crc kubenswrapper[4576]: E1203 08:41:26.676625 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.700732 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5f9zh_cfe34f07-1425-4b62-9eb0-70d1b197611c/ovnkube-controller/3.log" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.704145 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5f9zh_cfe34f07-1425-4b62-9eb0-70d1b197611c/ovn-acl-logging/0.log" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.752930 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.752989 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.753007 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.753024 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.753036 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:26Z","lastTransitionTime":"2025-12-03T08:41:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.856272 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.856347 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.856371 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.856405 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.856431 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:26Z","lastTransitionTime":"2025-12-03T08:41:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.959930 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.960019 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.960043 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.960069 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:26 crc kubenswrapper[4576]: I1203 08:41:26.960091 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:26Z","lastTransitionTime":"2025-12-03T08:41:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.064055 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.064122 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.064146 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.064236 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.064255 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:27Z","lastTransitionTime":"2025-12-03T08:41:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.166672 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.166717 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.166730 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.166750 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.166762 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:27Z","lastTransitionTime":"2025-12-03T08:41:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.269014 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.269048 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.269059 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.269076 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.269088 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:27Z","lastTransitionTime":"2025-12-03T08:41:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.371716 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.371764 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.371776 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.371795 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.371808 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:27Z","lastTransitionTime":"2025-12-03T08:41:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.473971 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.473999 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.474009 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.474021 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.474030 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:27Z","lastTransitionTime":"2025-12-03T08:41:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.577119 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.577182 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.577201 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.577236 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.577275 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:27Z","lastTransitionTime":"2025-12-03T08:41:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.676196 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.676212 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:41:27 crc kubenswrapper[4576]: E1203 08:41:27.676418 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:41:27 crc kubenswrapper[4576]: E1203 08:41:27.676467 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.679759 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.679821 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.679844 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.679875 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.679899 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:27Z","lastTransitionTime":"2025-12-03T08:41:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.783077 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.783127 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.783141 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.783163 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.783178 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:27Z","lastTransitionTime":"2025-12-03T08:41:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.886734 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.886832 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.886869 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.886981 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.887022 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:27Z","lastTransitionTime":"2025-12-03T08:41:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.991097 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.991172 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.991197 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.991253 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:27 crc kubenswrapper[4576]: I1203 08:41:27.991282 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:27Z","lastTransitionTime":"2025-12-03T08:41:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.094420 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.094494 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.094569 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.094605 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.094627 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:28Z","lastTransitionTime":"2025-12-03T08:41:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.197608 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.197661 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.197679 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.197703 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.197719 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:28Z","lastTransitionTime":"2025-12-03T08:41:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.301410 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.301492 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.301515 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.301585 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.301612 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:28Z","lastTransitionTime":"2025-12-03T08:41:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.406073 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.406145 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.406164 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.406191 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.406209 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:28Z","lastTransitionTime":"2025-12-03T08:41:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.509604 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.509683 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.509710 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.509743 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.509770 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:28Z","lastTransitionTime":"2025-12-03T08:41:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.612588 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.612633 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.612645 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.612663 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.612675 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:28Z","lastTransitionTime":"2025-12-03T08:41:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.676241 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.676326 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:41:28 crc kubenswrapper[4576]: E1203 08:41:28.676481 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:41:28 crc kubenswrapper[4576]: E1203 08:41:28.676743 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.715427 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.715460 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.715468 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.715481 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.715491 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:28Z","lastTransitionTime":"2025-12-03T08:41:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.819767 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.819831 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.819851 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.819875 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.819893 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:28Z","lastTransitionTime":"2025-12-03T08:41:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.922142 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.922182 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.922191 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.922205 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:28 crc kubenswrapper[4576]: I1203 08:41:28.922214 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:28Z","lastTransitionTime":"2025-12-03T08:41:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.024478 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.024517 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.024548 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.024566 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.024576 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:29Z","lastTransitionTime":"2025-12-03T08:41:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.127063 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.127101 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.127110 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.127124 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.127133 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:29Z","lastTransitionTime":"2025-12-03T08:41:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.229392 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.229429 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.229437 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.229450 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.229459 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:29Z","lastTransitionTime":"2025-12-03T08:41:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.332426 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.332501 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.332573 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.332605 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.332626 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:29Z","lastTransitionTime":"2025-12-03T08:41:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.436506 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.436617 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.436644 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.436673 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.436694 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:29Z","lastTransitionTime":"2025-12-03T08:41:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.539442 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.539495 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.539508 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.539548 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.539563 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:29Z","lastTransitionTime":"2025-12-03T08:41:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.642745 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.642790 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.642801 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.642819 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.642829 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:29Z","lastTransitionTime":"2025-12-03T08:41:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.677253 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.677425 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:41:29 crc kubenswrapper[4576]: E1203 08:41:29.677750 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:41:29 crc kubenswrapper[4576]: E1203 08:41:29.678184 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.746142 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.746202 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.746217 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.746238 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.746578 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:29Z","lastTransitionTime":"2025-12-03T08:41:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.849913 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.850002 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.850015 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.850036 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.850047 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:29Z","lastTransitionTime":"2025-12-03T08:41:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.952860 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.952898 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.952908 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.952926 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:29 crc kubenswrapper[4576]: I1203 08:41:29.952937 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:29Z","lastTransitionTime":"2025-12-03T08:41:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.056263 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.056360 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.056385 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.056450 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.056475 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:30Z","lastTransitionTime":"2025-12-03T08:41:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.159544 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.159643 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.159660 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.159677 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.159687 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:30Z","lastTransitionTime":"2025-12-03T08:41:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.262954 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.263011 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.263024 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.263042 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.263057 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:30Z","lastTransitionTime":"2025-12-03T08:41:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.366271 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.366314 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.366327 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.366344 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.366356 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:30Z","lastTransitionTime":"2025-12-03T08:41:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.469578 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.469704 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.469769 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.469798 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.469816 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:30Z","lastTransitionTime":"2025-12-03T08:41:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.572468 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.572553 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.572567 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.572587 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.572601 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:30Z","lastTransitionTime":"2025-12-03T08:41:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.675279 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.675325 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.675335 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.675350 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.675362 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:30Z","lastTransitionTime":"2025-12-03T08:41:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.676609 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.676645 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:41:30 crc kubenswrapper[4576]: E1203 08:41:30.676871 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:41:30 crc kubenswrapper[4576]: E1203 08:41:30.677040 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.778260 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.778320 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.778339 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.778365 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.778383 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:30Z","lastTransitionTime":"2025-12-03T08:41:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.881285 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.881323 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.881335 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.881389 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.881401 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:30Z","lastTransitionTime":"2025-12-03T08:41:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.985915 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.986074 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.986117 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.986151 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:30 crc kubenswrapper[4576]: I1203 08:41:30.986182 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:30Z","lastTransitionTime":"2025-12-03T08:41:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.090434 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.090488 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.090507 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.090567 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.090608 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:31Z","lastTransitionTime":"2025-12-03T08:41:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.193327 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.193420 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.193434 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.193451 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.193465 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:31Z","lastTransitionTime":"2025-12-03T08:41:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.296794 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.296827 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.296835 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.296849 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.296857 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:31Z","lastTransitionTime":"2025-12-03T08:41:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.399602 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.399661 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.399685 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.399714 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.399736 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:31Z","lastTransitionTime":"2025-12-03T08:41:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.503153 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.503228 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.503251 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.503276 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.503294 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:31Z","lastTransitionTime":"2025-12-03T08:41:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.607184 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.607252 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.607275 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.607304 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.607325 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:31Z","lastTransitionTime":"2025-12-03T08:41:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.677293 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:41:31 crc kubenswrapper[4576]: E1203 08:41:31.677519 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.677853 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:41:31 crc kubenswrapper[4576]: E1203 08:41:31.678028 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.710909 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.710982 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.711006 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.711036 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.711062 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:31Z","lastTransitionTime":"2025-12-03T08:41:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.813507 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.813590 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.813603 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.813623 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.813637 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:31Z","lastTransitionTime":"2025-12-03T08:41:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.916877 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.917218 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.917328 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.917506 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:31 crc kubenswrapper[4576]: I1203 08:41:31.917655 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:31Z","lastTransitionTime":"2025-12-03T08:41:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:32 crc kubenswrapper[4576]: I1203 08:41:32.021261 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:32 crc kubenswrapper[4576]: I1203 08:41:32.021314 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:32 crc kubenswrapper[4576]: I1203 08:41:32.021325 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:32 crc kubenswrapper[4576]: I1203 08:41:32.021345 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:32 crc kubenswrapper[4576]: I1203 08:41:32.021358 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:32Z","lastTransitionTime":"2025-12-03T08:41:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:32 crc kubenswrapper[4576]: I1203 08:41:32.125253 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:32 crc kubenswrapper[4576]: I1203 08:41:32.125694 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:32 crc kubenswrapper[4576]: I1203 08:41:32.125846 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:32 crc kubenswrapper[4576]: I1203 08:41:32.125997 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:32 crc kubenswrapper[4576]: I1203 08:41:32.126132 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:32Z","lastTransitionTime":"2025-12-03T08:41:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:32 crc kubenswrapper[4576]: I1203 08:41:32.228451 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:32 crc kubenswrapper[4576]: I1203 08:41:32.228517 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:32 crc kubenswrapper[4576]: I1203 08:41:32.228567 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:32 crc kubenswrapper[4576]: I1203 08:41:32.228596 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:32 crc kubenswrapper[4576]: I1203 08:41:32.228619 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:32Z","lastTransitionTime":"2025-12-03T08:41:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:32 crc kubenswrapper[4576]: I1203 08:41:32.332206 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:32 crc kubenswrapper[4576]: I1203 08:41:32.332271 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:32 crc kubenswrapper[4576]: I1203 08:41:32.332292 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:32 crc kubenswrapper[4576]: I1203 08:41:32.332318 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:32 crc kubenswrapper[4576]: I1203 08:41:32.332337 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:32Z","lastTransitionTime":"2025-12-03T08:41:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:32 crc kubenswrapper[4576]: I1203 08:41:32.435966 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:32 crc kubenswrapper[4576]: I1203 08:41:32.436057 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:32 crc kubenswrapper[4576]: I1203 08:41:32.436086 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:32 crc kubenswrapper[4576]: I1203 08:41:32.436119 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:32 crc kubenswrapper[4576]: I1203 08:41:32.436142 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:32Z","lastTransitionTime":"2025-12-03T08:41:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:32 crc kubenswrapper[4576]: I1203 08:41:32.538741 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:32 crc kubenswrapper[4576]: I1203 08:41:32.538795 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:32 crc kubenswrapper[4576]: I1203 08:41:32.538808 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:32 crc kubenswrapper[4576]: I1203 08:41:32.538825 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:32 crc kubenswrapper[4576]: I1203 08:41:32.538837 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:32Z","lastTransitionTime":"2025-12-03T08:41:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:32 crc kubenswrapper[4576]: I1203 08:41:32.987471 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:41:32 crc kubenswrapper[4576]: E1203 08:41:32.987614 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:41:32 crc kubenswrapper[4576]: I1203 08:41:32.987675 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:41:32 crc kubenswrapper[4576]: E1203 08:41:32.987736 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:41:32 crc kubenswrapper[4576]: I1203 08:41:32.988076 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:41:32 crc kubenswrapper[4576]: I1203 08:41:32.988081 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:41:32 crc kubenswrapper[4576]: E1203 08:41:32.988382 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:41:32 crc kubenswrapper[4576]: E1203 08:41:32.988484 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:41:32 crc kubenswrapper[4576]: I1203 08:41:32.989847 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:32 crc kubenswrapper[4576]: I1203 08:41:32.989873 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:32 crc kubenswrapper[4576]: I1203 08:41:32.989891 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:32 crc kubenswrapper[4576]: I1203 08:41:32.989907 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:32 crc kubenswrapper[4576]: I1203 08:41:32.989919 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:32Z","lastTransitionTime":"2025-12-03T08:41:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:33 crc kubenswrapper[4576]: E1203 08:41:33.006351 4576 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148060Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608860Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"86dcb870-937a-4231-a14c-053b8b425329\\\",\\\"systemUUID\\\":\\\"da5fb8e3-5a19-4f2d-831e-00b6f563dbea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:33Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.011455 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.011499 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.011511 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.011547 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.011563 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:33Z","lastTransitionTime":"2025-12-03T08:41:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:33 crc kubenswrapper[4576]: E1203 08:41:33.034425 4576 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148060Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608860Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"86dcb870-937a-4231-a14c-053b8b425329\\\",\\\"systemUUID\\\":\\\"da5fb8e3-5a19-4f2d-831e-00b6f563dbea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:33Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.038695 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.038725 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.038734 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.038747 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.038756 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:33Z","lastTransitionTime":"2025-12-03T08:41:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:33 crc kubenswrapper[4576]: E1203 08:41:33.053196 4576 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148060Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608860Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"86dcb870-937a-4231-a14c-053b8b425329\\\",\\\"systemUUID\\\":\\\"da5fb8e3-5a19-4f2d-831e-00b6f563dbea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:33Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.057263 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.057296 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.057307 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.057323 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.057333 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:33Z","lastTransitionTime":"2025-12-03T08:41:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:33 crc kubenswrapper[4576]: E1203 08:41:33.070849 4576 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148060Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608860Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"86dcb870-937a-4231-a14c-053b8b425329\\\",\\\"systemUUID\\\":\\\"da5fb8e3-5a19-4f2d-831e-00b6f563dbea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:33Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.074122 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.074145 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.074152 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.074164 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.074173 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:33Z","lastTransitionTime":"2025-12-03T08:41:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:33 crc kubenswrapper[4576]: E1203 08:41:33.086759 4576 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148060Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608860Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T08:41:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T08:41:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"86dcb870-937a-4231-a14c-053b8b425329\\\",\\\"systemUUID\\\":\\\"da5fb8e3-5a19-4f2d-831e-00b6f563dbea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T08:41:33Z is after 2025-08-24T17:21:41Z" Dec 03 08:41:33 crc kubenswrapper[4576]: E1203 08:41:33.086913 4576 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.088515 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.088676 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.088689 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.088705 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.089020 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:33Z","lastTransitionTime":"2025-12-03T08:41:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.190983 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.191037 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.191049 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.191063 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.191074 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:33Z","lastTransitionTime":"2025-12-03T08:41:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.294792 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.294857 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.294938 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.294983 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.295009 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:33Z","lastTransitionTime":"2025-12-03T08:41:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.397984 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.398029 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.398040 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.398058 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.398075 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:33Z","lastTransitionTime":"2025-12-03T08:41:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.501142 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.501257 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.501281 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.501368 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.501573 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:33Z","lastTransitionTime":"2025-12-03T08:41:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.604420 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.604480 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.604497 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.604521 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.604574 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:33Z","lastTransitionTime":"2025-12-03T08:41:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.710007 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.710039 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.710047 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.710061 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.710070 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:33Z","lastTransitionTime":"2025-12-03T08:41:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.758887 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-q8kww" podStartSLOduration=78.758822715 podStartE2EDuration="1m18.758822715s" podCreationTimestamp="2025-12-03 08:40:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:41:33.758446144 +0000 UTC m=+101.144423128" watchObservedRunningTime="2025-12-03 08:41:33.758822715 +0000 UTC m=+101.144799719" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.793803 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-6qf5t" podStartSLOduration=78.793778115 podStartE2EDuration="1m18.793778115s" podCreationTimestamp="2025-12-03 08:40:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:41:33.782446104 +0000 UTC m=+101.168423088" watchObservedRunningTime="2025-12-03 08:41:33.793778115 +0000 UTC m=+101.179755109" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.812876 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.812918 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.812932 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.812949 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.812960 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:33Z","lastTransitionTime":"2025-12-03T08:41:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.836016 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-jbxx2" podStartSLOduration=78.835990739 podStartE2EDuration="1m18.835990739s" podCreationTimestamp="2025-12-03 08:40:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:41:33.834812557 +0000 UTC m=+101.220789571" watchObservedRunningTime="2025-12-03 08:41:33.835990739 +0000 UTC m=+101.221967733" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.886399 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podStartSLOduration=78.88637559 podStartE2EDuration="1m18.88637559s" podCreationTimestamp="2025-12-03 08:40:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:41:33.8596866 +0000 UTC m=+101.245663604" watchObservedRunningTime="2025-12-03 08:41:33.88637559 +0000 UTC m=+101.272352584" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.901136 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-xrjlb" podStartSLOduration=78.901112362 podStartE2EDuration="1m18.901112362s" podCreationTimestamp="2025-12-03 08:40:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:41:33.900218219 +0000 UTC m=+101.286195223" watchObservedRunningTime="2025-12-03 08:41:33.901112362 +0000 UTC m=+101.287089346" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.913154 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=28.913131422 podStartE2EDuration="28.913131422s" podCreationTimestamp="2025-12-03 08:41:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:41:33.91266152 +0000 UTC m=+101.298638504" watchObservedRunningTime="2025-12-03 08:41:33.913131422 +0000 UTC m=+101.299108416" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.915030 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.915067 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.915077 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.915090 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.915100 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:33Z","lastTransitionTime":"2025-12-03T08:41:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.945749 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=77.94572739 podStartE2EDuration="1m17.94572739s" podCreationTimestamp="2025-12-03 08:40:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:41:33.943799819 +0000 UTC m=+101.329776823" watchObservedRunningTime="2025-12-03 08:41:33.94572739 +0000 UTC m=+101.331704374" Dec 03 08:41:33 crc kubenswrapper[4576]: I1203 08:41:33.959010 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=78.958986463 podStartE2EDuration="1m18.958986463s" podCreationTimestamp="2025-12-03 08:40:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:41:33.958332045 +0000 UTC m=+101.344309029" watchObservedRunningTime="2025-12-03 08:41:33.958986463 +0000 UTC m=+101.344963467" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.006235 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=80.00621061 podStartE2EDuration="1m20.00621061s" podCreationTimestamp="2025-12-03 08:40:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:41:34.004229637 +0000 UTC m=+101.390206651" watchObservedRunningTime="2025-12-03 08:41:34.00621061 +0000 UTC m=+101.392187594" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.006633 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r47b6" podStartSLOduration=78.006624781 podStartE2EDuration="1m18.006624781s" podCreationTimestamp="2025-12-03 08:40:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:41:33.988792126 +0000 UTC m=+101.374769120" watchObservedRunningTime="2025-12-03 08:41:34.006624781 +0000 UTC m=+101.392601795" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.017952 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.018046 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.018076 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.018101 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.018113 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:34Z","lastTransitionTime":"2025-12-03T08:41:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.020226 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=53.020212782 podStartE2EDuration="53.020212782s" podCreationTimestamp="2025-12-03 08:40:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:41:34.018759174 +0000 UTC m=+101.404736158" watchObservedRunningTime="2025-12-03 08:41:34.020212782 +0000 UTC m=+101.406189766" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.121790 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.121869 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.121899 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.121938 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.121957 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:34Z","lastTransitionTime":"2025-12-03T08:41:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.224611 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.224710 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.224730 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.224755 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.224774 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:34Z","lastTransitionTime":"2025-12-03T08:41:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.327196 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.327233 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.327245 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.327262 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.327272 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:34Z","lastTransitionTime":"2025-12-03T08:41:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.431503 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.431615 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.431652 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.431717 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.431745 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:34Z","lastTransitionTime":"2025-12-03T08:41:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.534699 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.534753 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.534762 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.534775 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.534790 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:34Z","lastTransitionTime":"2025-12-03T08:41:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.636961 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.637000 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.637009 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.637026 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.637035 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:34Z","lastTransitionTime":"2025-12-03T08:41:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.676801 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.676854 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.676837 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.676815 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:41:34 crc kubenswrapper[4576]: E1203 08:41:34.677042 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:41:34 crc kubenswrapper[4576]: E1203 08:41:34.677169 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:41:34 crc kubenswrapper[4576]: E1203 08:41:34.677302 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:41:34 crc kubenswrapper[4576]: E1203 08:41:34.677404 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.740514 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.740586 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.740600 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.740617 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.740630 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:34Z","lastTransitionTime":"2025-12-03T08:41:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.843783 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.843818 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.843827 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.843844 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.843853 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:34Z","lastTransitionTime":"2025-12-03T08:41:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.946623 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.946674 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.946687 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.946706 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:34 crc kubenswrapper[4576]: I1203 08:41:34.946718 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:34Z","lastTransitionTime":"2025-12-03T08:41:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.050188 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.050231 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.050242 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.050269 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.050317 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:35Z","lastTransitionTime":"2025-12-03T08:41:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.153719 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.153831 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.153854 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.153887 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.153908 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:35Z","lastTransitionTime":"2025-12-03T08:41:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.260203 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.260411 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.260444 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.260572 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.260627 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:35Z","lastTransitionTime":"2025-12-03T08:41:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.363890 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.363926 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.363935 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.363951 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.363960 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:35Z","lastTransitionTime":"2025-12-03T08:41:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.466914 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.466951 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.466978 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.466996 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.467008 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:35Z","lastTransitionTime":"2025-12-03T08:41:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.570499 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.570591 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.570605 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.570624 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.570636 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:35Z","lastTransitionTime":"2025-12-03T08:41:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.628261 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c87be72e-a53b-42c9-bb32-f56cd0febe24-metrics-certs\") pod \"network-metrics-daemon-pw7pk\" (UID: \"c87be72e-a53b-42c9-bb32-f56cd0febe24\") " pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:41:35 crc kubenswrapper[4576]: E1203 08:41:35.629034 4576 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 08:41:35 crc kubenswrapper[4576]: E1203 08:41:35.629350 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c87be72e-a53b-42c9-bb32-f56cd0febe24-metrics-certs podName:c87be72e-a53b-42c9-bb32-f56cd0febe24 nodeName:}" failed. No retries permitted until 2025-12-03 08:42:39.629270418 +0000 UTC m=+167.015247422 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c87be72e-a53b-42c9-bb32-f56cd0febe24-metrics-certs") pod "network-metrics-daemon-pw7pk" (UID: "c87be72e-a53b-42c9-bb32-f56cd0febe24") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.673791 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.673851 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.673870 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.673893 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.673912 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:35Z","lastTransitionTime":"2025-12-03T08:41:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.778122 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.778205 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.778218 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.778236 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.778251 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:35Z","lastTransitionTime":"2025-12-03T08:41:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.881435 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.881517 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.881567 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.881592 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.881611 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:35Z","lastTransitionTime":"2025-12-03T08:41:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.986385 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.986451 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.986468 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.986496 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:35 crc kubenswrapper[4576]: I1203 08:41:35.986519 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:35Z","lastTransitionTime":"2025-12-03T08:41:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.089624 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.089711 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.089726 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.089747 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.089757 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:36Z","lastTransitionTime":"2025-12-03T08:41:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.193914 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.194033 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.194054 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.194078 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.194179 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:36Z","lastTransitionTime":"2025-12-03T08:41:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.298604 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.298670 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.298683 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.298699 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.298716 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:36Z","lastTransitionTime":"2025-12-03T08:41:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.402069 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.402124 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.402133 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.402149 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.402158 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:36Z","lastTransitionTime":"2025-12-03T08:41:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.505824 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.505932 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.505949 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.505966 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.505976 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:36Z","lastTransitionTime":"2025-12-03T08:41:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.609251 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.609302 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.609315 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.609340 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.609359 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:36Z","lastTransitionTime":"2025-12-03T08:41:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.676480 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.676565 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:41:36 crc kubenswrapper[4576]: E1203 08:41:36.676662 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:41:36 crc kubenswrapper[4576]: E1203 08:41:36.676717 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.676827 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.676834 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:41:36 crc kubenswrapper[4576]: E1203 08:41:36.677065 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:41:36 crc kubenswrapper[4576]: E1203 08:41:36.677192 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.711780 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.711831 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.711847 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.711868 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.711883 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:36Z","lastTransitionTime":"2025-12-03T08:41:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.814641 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.814700 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.814719 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.814745 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.814763 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:36Z","lastTransitionTime":"2025-12-03T08:41:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.918268 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.918643 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.918905 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.919129 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:36 crc kubenswrapper[4576]: I1203 08:41:36.919359 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:36Z","lastTransitionTime":"2025-12-03T08:41:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.022870 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.023153 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.023233 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.023329 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.023409 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:37Z","lastTransitionTime":"2025-12-03T08:41:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.127509 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.127580 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.127599 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.127620 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.127635 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:37Z","lastTransitionTime":"2025-12-03T08:41:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.231194 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.231259 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.231283 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.231309 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.231331 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:37Z","lastTransitionTime":"2025-12-03T08:41:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.334802 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.334922 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.334987 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.335014 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.335033 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:37Z","lastTransitionTime":"2025-12-03T08:41:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.438336 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.438402 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.438421 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.438446 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.438468 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:37Z","lastTransitionTime":"2025-12-03T08:41:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.542244 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.542286 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.542296 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.542315 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.542328 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:37Z","lastTransitionTime":"2025-12-03T08:41:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.645777 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.645896 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.645917 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.645944 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.645967 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:37Z","lastTransitionTime":"2025-12-03T08:41:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.749913 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.749974 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.749987 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.750010 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.750027 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:37Z","lastTransitionTime":"2025-12-03T08:41:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.852502 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.852578 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.852591 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.852607 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.852620 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:37Z","lastTransitionTime":"2025-12-03T08:41:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.954490 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.954563 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.954577 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.954594 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:37 crc kubenswrapper[4576]: I1203 08:41:37.954607 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:37Z","lastTransitionTime":"2025-12-03T08:41:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.057802 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.057855 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.057864 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.057878 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.057887 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:38Z","lastTransitionTime":"2025-12-03T08:41:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.160500 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.160554 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.160568 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.160584 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.160596 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:38Z","lastTransitionTime":"2025-12-03T08:41:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.263373 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.263453 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.263468 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.263486 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.263502 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:38Z","lastTransitionTime":"2025-12-03T08:41:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.366104 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.366165 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.366176 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.366210 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.366220 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:38Z","lastTransitionTime":"2025-12-03T08:41:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.467829 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.467897 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.467914 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.467937 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.467954 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:38Z","lastTransitionTime":"2025-12-03T08:41:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.571038 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.571093 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.571105 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.571121 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.571133 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:38Z","lastTransitionTime":"2025-12-03T08:41:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.674763 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.674824 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.674841 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.674902 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.674916 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:38Z","lastTransitionTime":"2025-12-03T08:41:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.677194 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.677255 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.677206 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:41:38 crc kubenswrapper[4576]: E1203 08:41:38.677328 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:41:38 crc kubenswrapper[4576]: E1203 08:41:38.677463 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.677574 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:41:38 crc kubenswrapper[4576]: E1203 08:41:38.678203 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:41:38 crc kubenswrapper[4576]: E1203 08:41:38.678368 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.678628 4576 scope.go:117] "RemoveContainer" containerID="3e93a2db7560906496e9579df87c1f307dbd99c51fa36b08d85dc762c02b531c" Dec 03 08:41:38 crc kubenswrapper[4576]: E1203 08:41:38.679040 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-5f9zh_openshift-ovn-kubernetes(cfe34f07-1425-4b62-9eb0-70d1b197611c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.778872 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.778953 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.778978 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.779011 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.779036 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:38Z","lastTransitionTime":"2025-12-03T08:41:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.882020 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.882075 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.882093 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.882116 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.882133 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:38Z","lastTransitionTime":"2025-12-03T08:41:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.985405 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.985467 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.985486 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.985510 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:38 crc kubenswrapper[4576]: I1203 08:41:38.985573 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:38Z","lastTransitionTime":"2025-12-03T08:41:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.088338 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.088403 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.088427 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.088458 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.088484 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:39Z","lastTransitionTime":"2025-12-03T08:41:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.191628 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.191681 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.191693 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.191712 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.191725 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:39Z","lastTransitionTime":"2025-12-03T08:41:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.294391 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.294471 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.294495 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.294554 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.294579 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:39Z","lastTransitionTime":"2025-12-03T08:41:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.397317 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.397368 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.397379 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.397397 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.397412 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:39Z","lastTransitionTime":"2025-12-03T08:41:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.500192 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.500257 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.500298 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.500328 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.500348 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:39Z","lastTransitionTime":"2025-12-03T08:41:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.604034 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.604129 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.604161 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.604188 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.604208 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:39Z","lastTransitionTime":"2025-12-03T08:41:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.706932 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.707027 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.707049 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.707078 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.707102 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:39Z","lastTransitionTime":"2025-12-03T08:41:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.810241 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.810346 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.810370 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.810402 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.810426 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:39Z","lastTransitionTime":"2025-12-03T08:41:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.913770 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.913849 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.913876 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.913908 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:39 crc kubenswrapper[4576]: I1203 08:41:39.913930 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:39Z","lastTransitionTime":"2025-12-03T08:41:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.017419 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.017493 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.017516 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.017594 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.017618 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:40Z","lastTransitionTime":"2025-12-03T08:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.120320 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.120356 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.120364 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.120379 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.120390 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:40Z","lastTransitionTime":"2025-12-03T08:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.224245 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.224315 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.224341 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.224371 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.224393 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:40Z","lastTransitionTime":"2025-12-03T08:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.328481 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.328676 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.328700 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.328792 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.328843 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:40Z","lastTransitionTime":"2025-12-03T08:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.433303 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.433382 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.433401 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.433427 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.433445 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:40Z","lastTransitionTime":"2025-12-03T08:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.537586 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.537670 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.537695 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.537738 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.537771 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:40Z","lastTransitionTime":"2025-12-03T08:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.641580 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.641679 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.641704 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.641733 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.641768 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:40Z","lastTransitionTime":"2025-12-03T08:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.676786 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.676837 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.676813 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.676938 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:41:40 crc kubenswrapper[4576]: E1203 08:41:40.677212 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:41:40 crc kubenswrapper[4576]: E1203 08:41:40.677369 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:41:40 crc kubenswrapper[4576]: E1203 08:41:40.677602 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:41:40 crc kubenswrapper[4576]: E1203 08:41:40.677721 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.744909 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.745026 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.745364 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.745699 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.745986 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:40Z","lastTransitionTime":"2025-12-03T08:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.852141 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.852230 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.852279 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.852304 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.852321 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:40Z","lastTransitionTime":"2025-12-03T08:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.956440 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.956519 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.956576 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.956608 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:40 crc kubenswrapper[4576]: I1203 08:41:40.956634 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:40Z","lastTransitionTime":"2025-12-03T08:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.059365 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.059425 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.059446 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.059475 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.059498 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:41Z","lastTransitionTime":"2025-12-03T08:41:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.162779 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.162822 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.162840 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.162865 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.162883 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:41Z","lastTransitionTime":"2025-12-03T08:41:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.267685 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.267793 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.267815 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.267841 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.267859 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:41Z","lastTransitionTime":"2025-12-03T08:41:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.371937 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.371997 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.372018 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.372042 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.372063 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:41Z","lastTransitionTime":"2025-12-03T08:41:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.476768 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.476871 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.476890 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.476907 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.476916 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:41Z","lastTransitionTime":"2025-12-03T08:41:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.580333 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.580407 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.580425 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.580453 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.580472 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:41Z","lastTransitionTime":"2025-12-03T08:41:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.685704 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.685771 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.685789 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.686188 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.686271 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:41Z","lastTransitionTime":"2025-12-03T08:41:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.789880 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.789933 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.789950 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.789975 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.789991 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:41Z","lastTransitionTime":"2025-12-03T08:41:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.893923 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.894015 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.894065 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.894090 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.894108 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:41Z","lastTransitionTime":"2025-12-03T08:41:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.998843 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.998897 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.998910 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.998930 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:41 crc kubenswrapper[4576]: I1203 08:41:41.998945 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:41Z","lastTransitionTime":"2025-12-03T08:41:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.101828 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.101899 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.101909 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.101923 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.101953 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:42Z","lastTransitionTime":"2025-12-03T08:41:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.205275 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.205336 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.205352 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.205372 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.205385 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:42Z","lastTransitionTime":"2025-12-03T08:41:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.308392 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.308452 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.308475 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.308574 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.308602 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:42Z","lastTransitionTime":"2025-12-03T08:41:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.411972 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.412073 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.412093 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.412122 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.412142 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:42Z","lastTransitionTime":"2025-12-03T08:41:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.515863 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.515929 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.515943 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.515967 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.515980 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:42Z","lastTransitionTime":"2025-12-03T08:41:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.618294 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.618643 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.618711 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.618779 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.618846 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:42Z","lastTransitionTime":"2025-12-03T08:41:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.676788 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.676948 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.677316 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:41:42 crc kubenswrapper[4576]: E1203 08:41:42.677475 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.677502 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:41:42 crc kubenswrapper[4576]: E1203 08:41:42.677693 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:41:42 crc kubenswrapper[4576]: E1203 08:41:42.677741 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:41:42 crc kubenswrapper[4576]: E1203 08:41:42.678067 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.722158 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.722191 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.722200 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.722213 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.722222 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:42Z","lastTransitionTime":"2025-12-03T08:41:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.825312 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.825889 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.825959 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.826060 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.826138 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:42Z","lastTransitionTime":"2025-12-03T08:41:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.929292 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.929324 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.929332 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.929345 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:42 crc kubenswrapper[4576]: I1203 08:41:42.929354 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:42Z","lastTransitionTime":"2025-12-03T08:41:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:43 crc kubenswrapper[4576]: I1203 08:41:43.032833 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:43 crc kubenswrapper[4576]: I1203 08:41:43.032890 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:43 crc kubenswrapper[4576]: I1203 08:41:43.032908 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:43 crc kubenswrapper[4576]: I1203 08:41:43.032932 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:43 crc kubenswrapper[4576]: I1203 08:41:43.032954 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:43Z","lastTransitionTime":"2025-12-03T08:41:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:43 crc kubenswrapper[4576]: I1203 08:41:43.135754 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:43 crc kubenswrapper[4576]: I1203 08:41:43.135796 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:43 crc kubenswrapper[4576]: I1203 08:41:43.135808 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:43 crc kubenswrapper[4576]: I1203 08:41:43.135824 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:43 crc kubenswrapper[4576]: I1203 08:41:43.135835 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:43Z","lastTransitionTime":"2025-12-03T08:41:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 08:41:43 crc kubenswrapper[4576]: I1203 08:41:43.209972 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 08:41:43 crc kubenswrapper[4576]: I1203 08:41:43.210074 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 08:41:43 crc kubenswrapper[4576]: I1203 08:41:43.210757 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 08:41:43 crc kubenswrapper[4576]: I1203 08:41:43.210837 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 08:41:43 crc kubenswrapper[4576]: I1203 08:41:43.210865 4576 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T08:41:43Z","lastTransitionTime":"2025-12-03T08:41:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 08:41:43 crc kubenswrapper[4576]: I1203 08:41:43.296421 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-tkjwz"] Dec 03 08:41:43 crc kubenswrapper[4576]: I1203 08:41:43.297199 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tkjwz" Dec 03 08:41:43 crc kubenswrapper[4576]: I1203 08:41:43.300062 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 03 08:41:43 crc kubenswrapper[4576]: I1203 08:41:43.300061 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 03 08:41:43 crc kubenswrapper[4576]: I1203 08:41:43.300077 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 03 08:41:43 crc kubenswrapper[4576]: I1203 08:41:43.301512 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 03 08:41:43 crc kubenswrapper[4576]: I1203 08:41:43.419706 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43f64bad-84f1-4005-b2aa-6384ae33ec0a-service-ca\") pod \"cluster-version-operator-5c965bbfc6-tkjwz\" (UID: \"43f64bad-84f1-4005-b2aa-6384ae33ec0a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tkjwz" Dec 03 08:41:43 crc kubenswrapper[4576]: I1203 08:41:43.420005 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/43f64bad-84f1-4005-b2aa-6384ae33ec0a-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-tkjwz\" (UID: \"43f64bad-84f1-4005-b2aa-6384ae33ec0a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tkjwz" Dec 03 08:41:43 crc kubenswrapper[4576]: I1203 08:41:43.420092 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/43f64bad-84f1-4005-b2aa-6384ae33ec0a-kube-api-access\") pod 
\"cluster-version-operator-5c965bbfc6-tkjwz\" (UID: \"43f64bad-84f1-4005-b2aa-6384ae33ec0a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tkjwz" Dec 03 08:41:43 crc kubenswrapper[4576]: I1203 08:41:43.420184 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/43f64bad-84f1-4005-b2aa-6384ae33ec0a-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-tkjwz\" (UID: \"43f64bad-84f1-4005-b2aa-6384ae33ec0a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tkjwz" Dec 03 08:41:43 crc kubenswrapper[4576]: I1203 08:41:43.420256 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/43f64bad-84f1-4005-b2aa-6384ae33ec0a-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-tkjwz\" (UID: \"43f64bad-84f1-4005-b2aa-6384ae33ec0a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tkjwz" Dec 03 08:41:43 crc kubenswrapper[4576]: I1203 08:41:43.521322 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/43f64bad-84f1-4005-b2aa-6384ae33ec0a-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-tkjwz\" (UID: \"43f64bad-84f1-4005-b2aa-6384ae33ec0a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tkjwz" Dec 03 08:41:43 crc kubenswrapper[4576]: I1203 08:41:43.521383 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/43f64bad-84f1-4005-b2aa-6384ae33ec0a-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-tkjwz\" (UID: \"43f64bad-84f1-4005-b2aa-6384ae33ec0a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tkjwz" Dec 03 08:41:43 crc kubenswrapper[4576]: I1203 08:41:43.521435 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/43f64bad-84f1-4005-b2aa-6384ae33ec0a-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-tkjwz\" (UID: \"43f64bad-84f1-4005-b2aa-6384ae33ec0a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tkjwz" Dec 03 08:41:43 crc kubenswrapper[4576]: I1203 08:41:43.521488 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/43f64bad-84f1-4005-b2aa-6384ae33ec0a-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-tkjwz\" (UID: \"43f64bad-84f1-4005-b2aa-6384ae33ec0a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tkjwz" Dec 03 08:41:43 crc kubenswrapper[4576]: I1203 08:41:43.521666 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43f64bad-84f1-4005-b2aa-6384ae33ec0a-service-ca\") pod \"cluster-version-operator-5c965bbfc6-tkjwz\" (UID: \"43f64bad-84f1-4005-b2aa-6384ae33ec0a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tkjwz" Dec 03 08:41:43 crc kubenswrapper[4576]: I1203 08:41:43.521715 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/43f64bad-84f1-4005-b2aa-6384ae33ec0a-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-tkjwz\" (UID: 
\"43f64bad-84f1-4005-b2aa-6384ae33ec0a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tkjwz" Dec 03 08:41:43 crc kubenswrapper[4576]: I1203 08:41:43.521850 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/43f64bad-84f1-4005-b2aa-6384ae33ec0a-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-tkjwz\" (UID: \"43f64bad-84f1-4005-b2aa-6384ae33ec0a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tkjwz" Dec 03 08:41:43 crc kubenswrapper[4576]: I1203 08:41:43.523821 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43f64bad-84f1-4005-b2aa-6384ae33ec0a-service-ca\") pod \"cluster-version-operator-5c965bbfc6-tkjwz\" (UID: \"43f64bad-84f1-4005-b2aa-6384ae33ec0a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tkjwz" Dec 03 08:41:43 crc kubenswrapper[4576]: I1203 08:41:43.537215 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/43f64bad-84f1-4005-b2aa-6384ae33ec0a-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-tkjwz\" (UID: \"43f64bad-84f1-4005-b2aa-6384ae33ec0a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tkjwz" Dec 03 08:41:43 crc kubenswrapper[4576]: I1203 08:41:43.549762 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/43f64bad-84f1-4005-b2aa-6384ae33ec0a-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-tkjwz\" (UID: \"43f64bad-84f1-4005-b2aa-6384ae33ec0a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tkjwz" Dec 03 08:41:43 crc kubenswrapper[4576]: I1203 08:41:43.614338 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tkjwz" Dec 03 08:41:43 crc kubenswrapper[4576]: W1203 08:41:43.656199 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod43f64bad_84f1_4005_b2aa_6384ae33ec0a.slice/crio-1ab3119da36b1953099616b29c1f7587a21cf24eabfea9f14622d5bee85217a9 WatchSource:0}: Error finding container 1ab3119da36b1953099616b29c1f7587a21cf24eabfea9f14622d5bee85217a9: Status 404 returned error can't find the container with id 1ab3119da36b1953099616b29c1f7587a21cf24eabfea9f14622d5bee85217a9 Dec 03 08:41:44 crc kubenswrapper[4576]: I1203 08:41:44.076358 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tkjwz" event={"ID":"43f64bad-84f1-4005-b2aa-6384ae33ec0a","Type":"ContainerStarted","Data":"a3f878d9f1e58249eac9acc6ca6c0b3d2d1000289ec515b4a43443125b0e8185"} Dec 03 08:41:44 crc kubenswrapper[4576]: I1203 08:41:44.076449 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tkjwz" event={"ID":"43f64bad-84f1-4005-b2aa-6384ae33ec0a","Type":"ContainerStarted","Data":"1ab3119da36b1953099616b29c1f7587a21cf24eabfea9f14622d5bee85217a9"} Dec 03 08:41:44 crc kubenswrapper[4576]: I1203 08:41:44.676542 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:41:44 crc kubenswrapper[4576]: I1203 08:41:44.676604 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:41:44 crc kubenswrapper[4576]: E1203 08:41:44.676955 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:41:44 crc kubenswrapper[4576]: I1203 08:41:44.676687 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:41:44 crc kubenswrapper[4576]: I1203 08:41:44.676633 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:41:44 crc kubenswrapper[4576]: E1203 08:41:44.677146 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:41:44 crc kubenswrapper[4576]: E1203 08:41:44.677236 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:41:44 crc kubenswrapper[4576]: E1203 08:41:44.677272 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:41:46 crc kubenswrapper[4576]: I1203 08:41:46.676139 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:41:46 crc kubenswrapper[4576]: I1203 08:41:46.676167 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:41:46 crc kubenswrapper[4576]: I1203 08:41:46.676275 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:41:46 crc kubenswrapper[4576]: I1203 08:41:46.676315 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:41:46 crc kubenswrapper[4576]: E1203 08:41:46.676390 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:41:46 crc kubenswrapper[4576]: E1203 08:41:46.676690 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:41:46 crc kubenswrapper[4576]: E1203 08:41:46.676760 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:41:46 crc kubenswrapper[4576]: E1203 08:41:46.676850 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:41:48 crc kubenswrapper[4576]: I1203 08:41:48.676757 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:41:48 crc kubenswrapper[4576]: I1203 08:41:48.676801 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:41:48 crc kubenswrapper[4576]: I1203 08:41:48.676780 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:41:48 crc kubenswrapper[4576]: I1203 08:41:48.676757 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:41:48 crc kubenswrapper[4576]: E1203 08:41:48.677220 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:41:48 crc kubenswrapper[4576]: E1203 08:41:48.677403 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:41:48 crc kubenswrapper[4576]: E1203 08:41:48.677629 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:41:48 crc kubenswrapper[4576]: E1203 08:41:48.677779 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:41:50 crc kubenswrapper[4576]: I1203 08:41:50.677428 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:41:50 crc kubenswrapper[4576]: I1203 08:41:50.677518 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:41:50 crc kubenswrapper[4576]: I1203 08:41:50.677621 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:41:50 crc kubenswrapper[4576]: E1203 08:41:50.677758 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:41:50 crc kubenswrapper[4576]: I1203 08:41:50.677889 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:41:50 crc kubenswrapper[4576]: E1203 08:41:50.678067 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:41:50 crc kubenswrapper[4576]: E1203 08:41:50.678186 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:41:50 crc kubenswrapper[4576]: I1203 08:41:50.678249 4576 scope.go:117] "RemoveContainer" containerID="3e93a2db7560906496e9579df87c1f307dbd99c51fa36b08d85dc762c02b531c" Dec 03 08:41:50 crc kubenswrapper[4576]: E1203 08:41:50.678341 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:41:50 crc kubenswrapper[4576]: E1203 08:41:50.678608 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-5f9zh_openshift-ovn-kubernetes(cfe34f07-1425-4b62-9eb0-70d1b197611c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" Dec 03 08:41:52 crc kubenswrapper[4576]: I1203 08:41:52.113945 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jbxx2_e2b7eac2-6611-49d0-9da5-f97a3ccc9529/kube-multus/1.log" Dec 03 08:41:52 crc kubenswrapper[4576]: I1203 08:41:52.114585 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jbxx2_e2b7eac2-6611-49d0-9da5-f97a3ccc9529/kube-multus/0.log" Dec 03 08:41:52 crc kubenswrapper[4576]: I1203 08:41:52.114689 4576 generic.go:334] "Generic (PLEG): container finished" podID="e2b7eac2-6611-49d0-9da5-f97a3ccc9529" containerID="43112fdd73d3944e79a634803caefc0a68a10da3cdf3713c117ade99e3d6f1c9" exitCode=1 Dec 03 08:41:52 crc kubenswrapper[4576]: I1203 08:41:52.114745 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jbxx2" event={"ID":"e2b7eac2-6611-49d0-9da5-f97a3ccc9529","Type":"ContainerDied","Data":"43112fdd73d3944e79a634803caefc0a68a10da3cdf3713c117ade99e3d6f1c9"} Dec 03 08:41:52 crc kubenswrapper[4576]: I1203 08:41:52.114857 4576 scope.go:117] "RemoveContainer" containerID="87ab08c02e31fb934248fab8c55b94006e54110d6176cd571dbefd774980d9aa" Dec 03 08:41:52 crc kubenswrapper[4576]: I1203 08:41:52.115625 4576 scope.go:117] "RemoveContainer" containerID="43112fdd73d3944e79a634803caefc0a68a10da3cdf3713c117ade99e3d6f1c9" Dec 03 08:41:52 crc kubenswrapper[4576]: E1203 08:41:52.115960 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-jbxx2_openshift-multus(e2b7eac2-6611-49d0-9da5-f97a3ccc9529)\"" pod="openshift-multus/multus-jbxx2" podUID="e2b7eac2-6611-49d0-9da5-f97a3ccc9529" Dec 03 08:41:52 crc kubenswrapper[4576]: I1203 08:41:52.143145 4576 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tkjwz" podStartSLOduration=97.14310164 podStartE2EDuration="1m37.14310164s" podCreationTimestamp="2025-12-03 08:40:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:41:44.104021301 +0000 UTC m=+111.489998295" watchObservedRunningTime="2025-12-03 08:41:52.14310164 +0000 UTC m=+119.529078634" Dec 03 08:41:52 crc kubenswrapper[4576]: I1203 08:41:52.677363 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:41:52 crc kubenswrapper[4576]: I1203 08:41:52.677367 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:41:52 crc kubenswrapper[4576]: I1203 08:41:52.677372 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:41:52 crc kubenswrapper[4576]: I1203 08:41:52.677411 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:41:52 crc kubenswrapper[4576]: E1203 08:41:52.677767 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:41:52 crc kubenswrapper[4576]: E1203 08:41:52.678117 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:41:52 crc kubenswrapper[4576]: E1203 08:41:52.678164 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:41:52 crc kubenswrapper[4576]: E1203 08:41:52.678299 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:41:53 crc kubenswrapper[4576]: I1203 08:41:53.121838 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jbxx2_e2b7eac2-6611-49d0-9da5-f97a3ccc9529/kube-multus/1.log" Dec 03 08:41:53 crc kubenswrapper[4576]: E1203 08:41:53.585544 4576 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Dec 03 08:41:53 crc kubenswrapper[4576]: E1203 08:41:53.852837 4576 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 08:41:54 crc kubenswrapper[4576]: I1203 08:41:54.676478 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:41:54 crc kubenswrapper[4576]: I1203 08:41:54.676483 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:41:54 crc kubenswrapper[4576]: I1203 08:41:54.676700 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:41:54 crc kubenswrapper[4576]: I1203 08:41:54.676704 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:41:54 crc kubenswrapper[4576]: E1203 08:41:54.676910 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:41:54 crc kubenswrapper[4576]: E1203 08:41:54.677035 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:41:54 crc kubenswrapper[4576]: E1203 08:41:54.677115 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:41:54 crc kubenswrapper[4576]: E1203 08:41:54.677208 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:41:56 crc kubenswrapper[4576]: I1203 08:41:56.676708 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:41:56 crc kubenswrapper[4576]: I1203 08:41:56.676769 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:41:56 crc kubenswrapper[4576]: I1203 08:41:56.676786 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:41:56 crc kubenswrapper[4576]: I1203 08:41:56.676836 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:41:56 crc kubenswrapper[4576]: E1203 08:41:56.676868 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:41:56 crc kubenswrapper[4576]: E1203 08:41:56.676966 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:41:56 crc kubenswrapper[4576]: E1203 08:41:56.677157 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:41:56 crc kubenswrapper[4576]: E1203 08:41:56.677247 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:41:58 crc kubenswrapper[4576]: I1203 08:41:58.676358 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:41:58 crc kubenswrapper[4576]: I1203 08:41:58.676487 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:41:58 crc kubenswrapper[4576]: I1203 08:41:58.676502 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:41:58 crc kubenswrapper[4576]: I1203 08:41:58.676626 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:41:58 crc kubenswrapper[4576]: E1203 08:41:58.677501 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:41:58 crc kubenswrapper[4576]: E1203 08:41:58.677863 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:41:58 crc kubenswrapper[4576]: E1203 08:41:58.678079 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:41:58 crc kubenswrapper[4576]: E1203 08:41:58.678240 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:41:58 crc kubenswrapper[4576]: E1203 08:41:58.854729 4576 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 08:42:00 crc kubenswrapper[4576]: I1203 08:42:00.676785 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:42:00 crc kubenswrapper[4576]: I1203 08:42:00.676884 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:42:00 crc kubenswrapper[4576]: I1203 08:42:00.676905 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:42:00 crc kubenswrapper[4576]: I1203 08:42:00.676965 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:42:00 crc kubenswrapper[4576]: E1203 08:42:00.677099 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:42:00 crc kubenswrapper[4576]: E1203 08:42:00.677257 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:42:00 crc kubenswrapper[4576]: E1203 08:42:00.677367 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:42:00 crc kubenswrapper[4576]: E1203 08:42:00.677443 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:42:02 crc kubenswrapper[4576]: I1203 08:42:02.676843 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:42:02 crc kubenswrapper[4576]: I1203 08:42:02.676840 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:42:02 crc kubenswrapper[4576]: E1203 08:42:02.677146 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:42:02 crc kubenswrapper[4576]: I1203 08:42:02.677458 4576 scope.go:117] "RemoveContainer" containerID="43112fdd73d3944e79a634803caefc0a68a10da3cdf3713c117ade99e3d6f1c9" Dec 03 08:42:02 crc kubenswrapper[4576]: I1203 08:42:02.677605 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:42:02 crc kubenswrapper[4576]: E1203 08:42:02.677785 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:42:02 crc kubenswrapper[4576]: E1203 08:42:02.678106 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:42:02 crc kubenswrapper[4576]: I1203 08:42:02.680499 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:42:02 crc kubenswrapper[4576]: E1203 08:42:02.681803 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:42:03 crc kubenswrapper[4576]: I1203 08:42:03.166942 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jbxx2_e2b7eac2-6611-49d0-9da5-f97a3ccc9529/kube-multus/1.log" Dec 03 08:42:03 crc kubenswrapper[4576]: I1203 08:42:03.167378 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jbxx2" event={"ID":"e2b7eac2-6611-49d0-9da5-f97a3ccc9529","Type":"ContainerStarted","Data":"93a0f3796d3ca6c9b16721e20d639fadcb243de13badf957cca591620294c930"} Dec 03 08:42:03 crc kubenswrapper[4576]: I1203 08:42:03.679141 4576 scope.go:117] "RemoveContainer" containerID="3e93a2db7560906496e9579df87c1f307dbd99c51fa36b08d85dc762c02b531c" Dec 03 08:42:03 crc kubenswrapper[4576]: E1203 08:42:03.679437 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-5f9zh_openshift-ovn-kubernetes(cfe34f07-1425-4b62-9eb0-70d1b197611c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" Dec 03 08:42:03 crc kubenswrapper[4576]: E1203 08:42:03.855611 4576 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 08:42:04 crc kubenswrapper[4576]: I1203 08:42:04.676928 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:42:04 crc kubenswrapper[4576]: I1203 08:42:04.677017 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:42:04 crc kubenswrapper[4576]: I1203 08:42:04.677073 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:42:04 crc kubenswrapper[4576]: I1203 08:42:04.677016 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:42:04 crc kubenswrapper[4576]: E1203 08:42:04.677328 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:42:04 crc kubenswrapper[4576]: E1203 08:42:04.677519 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:42:04 crc kubenswrapper[4576]: E1203 08:42:04.678331 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:42:04 crc kubenswrapper[4576]: E1203 08:42:04.678487 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:42:06 crc kubenswrapper[4576]: I1203 08:42:06.677159 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:42:06 crc kubenswrapper[4576]: I1203 08:42:06.677234 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:42:06 crc kubenswrapper[4576]: I1203 08:42:06.677255 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:42:06 crc kubenswrapper[4576]: I1203 08:42:06.677260 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:42:06 crc kubenswrapper[4576]: E1203 08:42:06.677355 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:42:06 crc kubenswrapper[4576]: E1203 08:42:06.677459 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:42:06 crc kubenswrapper[4576]: E1203 08:42:06.677724 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:42:06 crc kubenswrapper[4576]: E1203 08:42:06.677773 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:42:08 crc kubenswrapper[4576]: I1203 08:42:08.676807 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:42:08 crc kubenswrapper[4576]: I1203 08:42:08.676859 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:42:08 crc kubenswrapper[4576]: I1203 08:42:08.676851 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:42:08 crc kubenswrapper[4576]: I1203 08:42:08.676807 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:42:08 crc kubenswrapper[4576]: E1203 08:42:08.676990 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:42:08 crc kubenswrapper[4576]: E1203 08:42:08.677089 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:42:08 crc kubenswrapper[4576]: E1203 08:42:08.677189 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:42:08 crc kubenswrapper[4576]: E1203 08:42:08.677383 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:42:08 crc kubenswrapper[4576]: E1203 08:42:08.857579 4576 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 08:42:10 crc kubenswrapper[4576]: I1203 08:42:10.677077 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:42:10 crc kubenswrapper[4576]: I1203 08:42:10.677077 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:42:10 crc kubenswrapper[4576]: I1203 08:42:10.677097 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:42:10 crc kubenswrapper[4576]: I1203 08:42:10.677218 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:42:10 crc kubenswrapper[4576]: E1203 08:42:10.678347 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:42:10 crc kubenswrapper[4576]: E1203 08:42:10.678350 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:42:10 crc kubenswrapper[4576]: E1203 08:42:10.678690 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:42:10 crc kubenswrapper[4576]: E1203 08:42:10.678904 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:42:12 crc kubenswrapper[4576]: I1203 08:42:12.677238 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:42:12 crc kubenswrapper[4576]: I1203 08:42:12.677303 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:42:12 crc kubenswrapper[4576]: I1203 08:42:12.677238 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:42:12 crc kubenswrapper[4576]: I1203 08:42:12.677352 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:42:12 crc kubenswrapper[4576]: E1203 08:42:12.677502 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:42:12 crc kubenswrapper[4576]: E1203 08:42:12.677770 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:42:12 crc kubenswrapper[4576]: E1203 08:42:12.677900 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:42:12 crc kubenswrapper[4576]: E1203 08:42:12.678033 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:42:13 crc kubenswrapper[4576]: E1203 08:42:13.858561 4576 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 08:42:14 crc kubenswrapper[4576]: I1203 08:42:14.676606 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:42:14 crc kubenswrapper[4576]: I1203 08:42:14.676656 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:42:14 crc kubenswrapper[4576]: E1203 08:42:14.677132 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:42:14 crc kubenswrapper[4576]: I1203 08:42:14.676791 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:42:14 crc kubenswrapper[4576]: I1203 08:42:14.676721 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:42:14 crc kubenswrapper[4576]: E1203 08:42:14.677332 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:42:14 crc kubenswrapper[4576]: E1203 08:42:14.677489 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:42:14 crc kubenswrapper[4576]: E1203 08:42:14.677688 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:42:16 crc kubenswrapper[4576]: I1203 08:42:16.676946 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:42:16 crc kubenswrapper[4576]: I1203 08:42:16.677018 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:42:16 crc kubenswrapper[4576]: I1203 08:42:16.676989 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:42:16 crc kubenswrapper[4576]: E1203 08:42:16.677128 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:42:16 crc kubenswrapper[4576]: E1203 08:42:16.677210 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:42:16 crc kubenswrapper[4576]: E1203 08:42:16.677388 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:42:16 crc kubenswrapper[4576]: I1203 08:42:16.677793 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:42:16 crc kubenswrapper[4576]: E1203 08:42:16.677959 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:42:16 crc kubenswrapper[4576]: I1203 08:42:16.678184 4576 scope.go:117] "RemoveContainer" containerID="3e93a2db7560906496e9579df87c1f307dbd99c51fa36b08d85dc762c02b531c" Dec 03 08:42:17 crc kubenswrapper[4576]: I1203 08:42:17.235194 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5f9zh_cfe34f07-1425-4b62-9eb0-70d1b197611c/ovnkube-controller/3.log" Dec 03 08:42:17 crc kubenswrapper[4576]: I1203 08:42:17.238268 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5f9zh_cfe34f07-1425-4b62-9eb0-70d1b197611c/ovn-acl-logging/0.log" Dec 03 08:42:17 crc kubenswrapper[4576]: I1203 08:42:17.239104 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" event={"ID":"cfe34f07-1425-4b62-9eb0-70d1b197611c","Type":"ContainerStarted","Data":"58837871b925370f84d8c9465faa146eb92d65c964f3819d2f27135917439036"} Dec 03 08:42:17 crc kubenswrapper[4576]: I1203 08:42:17.239570 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:42:17 crc kubenswrapper[4576]: I1203 08:42:17.275979 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" podStartSLOduration=122.275944909 podStartE2EDuration="2m2.275944909s" podCreationTimestamp="2025-12-03 08:40:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:17.275862327 +0000 UTC m=+144.661839321" watchObservedRunningTime="2025-12-03 08:42:17.275944909 +0000 UTC m=+144.661921883" Dec 03 08:42:17 crc kubenswrapper[4576]: I1203 08:42:17.667812 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-pw7pk"] Dec 03 08:42:17 crc kubenswrapper[4576]: I1203 08:42:17.667933 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:42:17 crc kubenswrapper[4576]: E1203 08:42:17.668030 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:42:18 crc kubenswrapper[4576]: I1203 08:42:18.676794 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:42:18 crc kubenswrapper[4576]: I1203 08:42:18.676825 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:42:18 crc kubenswrapper[4576]: E1203 08:42:18.677294 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:42:18 crc kubenswrapper[4576]: I1203 08:42:18.676846 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:42:18 crc kubenswrapper[4576]: E1203 08:42:18.677387 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:42:18 crc kubenswrapper[4576]: E1203 08:42:18.677432 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:42:18 crc kubenswrapper[4576]: E1203 08:42:18.860796 4576 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 08:42:19 crc kubenswrapper[4576]: I1203 08:42:19.677195 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:42:19 crc kubenswrapper[4576]: E1203 08:42:19.677496 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:42:20 crc kubenswrapper[4576]: I1203 08:42:20.676752 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:42:20 crc kubenswrapper[4576]: I1203 08:42:20.676794 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:42:20 crc kubenswrapper[4576]: I1203 08:42:20.676821 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:42:20 crc kubenswrapper[4576]: E1203 08:42:20.676985 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:42:20 crc kubenswrapper[4576]: E1203 08:42:20.677114 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:42:20 crc kubenswrapper[4576]: E1203 08:42:20.677684 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:42:21 crc kubenswrapper[4576]: I1203 08:42:21.676984 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:42:21 crc kubenswrapper[4576]: E1203 08:42:21.677122 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:42:22 crc kubenswrapper[4576]: I1203 08:42:22.676311 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:42:22 crc kubenswrapper[4576]: I1203 08:42:22.676397 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:42:22 crc kubenswrapper[4576]: E1203 08:42:22.676698 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 08:42:22 crc kubenswrapper[4576]: I1203 08:42:22.676444 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:42:22 crc kubenswrapper[4576]: E1203 08:42:22.676829 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:42:22 crc kubenswrapper[4576]: E1203 08:42:22.676948 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 08:42:22 crc kubenswrapper[4576]: I1203 08:42:22.948699 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:22 crc kubenswrapper[4576]: E1203 08:42:22.949079 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:44:24.948999445 +0000 UTC m=+272.334976529 (durationBeforeRetry 2m2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:22 crc kubenswrapper[4576]: I1203 08:42:22.949220 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:42:22 crc kubenswrapper[4576]: I1203 08:42:22.949347 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:42:22 crc kubenswrapper[4576]: E1203 08:42:22.949644 4576 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 08:42:22 crc kubenswrapper[4576]: E1203 08:42:22.949767 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 08:44:24.949733004 +0000 UTC m=+272.335710058 (durationBeforeRetry 2m2s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 08:42:22 crc kubenswrapper[4576]: I1203 08:42:22.949646 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:42:22 crc kubenswrapper[4576]: E1203 08:42:22.949864 4576 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 08:42:22 crc kubenswrapper[4576]: I1203 08:42:22.949897 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:42:22 crc kubenswrapper[4576]: E1203 08:42:22.949934 4576 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 08:42:22 crc kubenswrapper[4576]: E1203 08:42:22.949977 4576 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 08:42:22 crc kubenswrapper[4576]: E1203 08:42:22.949749 4576 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 08:42:22 crc kubenswrapper[4576]: E1203 08:42:22.950075 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 08:44:24.950048983 +0000 UTC m=+272.336026007 (durationBeforeRetry 2m2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 08:42:22 crc kubenswrapper[4576]: E1203 08:42:22.950093 4576 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 08:42:22 crc kubenswrapper[4576]: E1203 08:42:22.950122 4576 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 08:42:22 crc kubenswrapper[4576]: E1203 08:42:22.950140 4576 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 08:42:22 crc kubenswrapper[4576]: E1203 08:42:22.950212 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 08:44:24.950193927 +0000 UTC m=+272.336170971 (durationBeforeRetry 2m2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 08:42:22 crc kubenswrapper[4576]: E1203 08:42:22.950242 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 08:44:24.950228478 +0000 UTC m=+272.336205592 (durationBeforeRetry 2m2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 08:42:23 crc kubenswrapper[4576]: I1203 08:42:23.676190 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:42:23 crc kubenswrapper[4576]: E1203 08:42:23.677394 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pw7pk" podUID="c87be72e-a53b-42c9-bb32-f56cd0febe24" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.118971 4576 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.171746 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-t7p55"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.172939 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-t7p55" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.175439 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-hhd5z"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.176079 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-hhd5z" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.177717 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-74x2q"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.178646 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-74x2q" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.181721 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-gnxs8"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.182037 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.182316 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gnxs8" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.183008 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.183496 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.183519 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.184116 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.184668 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-km29k"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.185369 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-km29k" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.187170 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-nsf2s"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.187793 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.188033 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.188254 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.188264 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-nsf2s" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.188371 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.188603 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lskhl"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.188824 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.189091 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lskhl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.189269 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.189920 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.190729 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.194958 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.195439 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.198466 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 03 08:42:24 crc kubenswrapper[4576]: W1203 08:42:24.199459 4576 reflector.go:561] object-"openshift-machine-api"/"kube-rbac-proxy": failed to list *v1.ConfigMap: configmaps "kube-rbac-proxy" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-api": no relationship found between node 'crc' and this object Dec 03 08:42:24 crc kubenswrapper[4576]: E1203 08:42:24.199506 4576 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-api\"/\"kube-rbac-proxy\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-rbac-proxy\" is forbidden: User 
\"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-api\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.200670 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.200804 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.200910 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 03 08:42:24 crc kubenswrapper[4576]: W1203 08:42:24.206190 4576 reflector.go:561] object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv": failed to list *v1.Secret: secrets "openshift-apiserver-operator-dockercfg-xtcjv" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-apiserver-operator": no relationship found between node 'crc' and this object Dec 03 08:42:24 crc kubenswrapper[4576]: E1203 08:42:24.206268 4576 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver-operator\"/\"openshift-apiserver-operator-dockercfg-xtcjv\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"openshift-apiserver-operator-dockercfg-xtcjv\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-apiserver-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.206488 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.206793 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 03 08:42:24 crc kubenswrapper[4576]: W1203 08:42:24.207037 4576 reflector.go:561] object-"openshift-machine-api"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-api": no relationship found between node 'crc' and this object Dec 03 08:42:24 crc kubenswrapper[4576]: E1203 08:42:24.207079 4576 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-api\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-api\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 03 08:42:24 crc kubenswrapper[4576]: W1203 08:42:24.207048 4576 reflector.go:561] object-"openshift-apiserver-operator"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-apiserver-operator": no relationship found between node 'crc' and this object Dec 03 08:42:24 crc kubenswrapper[4576]: E1203 08:42:24.207122 4576 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver-operator\"/\"openshift-service-ca.crt\": Failed to watch 
*v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-apiserver-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 03 08:42:24 crc kubenswrapper[4576]: W1203 08:42:24.207245 4576 reflector.go:561] object-"openshift-apiserver-operator"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-apiserver-operator": no relationship found between node 'crc' and this object Dec 03 08:42:24 crc kubenswrapper[4576]: E1203 08:42:24.207270 4576 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver-operator\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-apiserver-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.207666 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.210648 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-x5pdd"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.211427 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-6tzl7"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.212047 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-76tfl"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.212627 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-x5pdd" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.212699 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.212814 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-6tzl7" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.217604 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-l27nx"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.218126 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-l27nx" Dec 03 08:42:24 crc kubenswrapper[4576]: W1203 08:42:24.218385 4576 reflector.go:561] object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7": failed to list *v1.Secret: secrets "machine-api-operator-dockercfg-mfbb7" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-machine-api": no relationship found between node 'crc' and this object Dec 03 08:42:24 crc kubenswrapper[4576]: W1203 08:42:24.218467 4576 reflector.go:561] object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert": failed to list *v1.Secret: secrets "openshift-apiserver-operator-serving-cert" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-apiserver-operator": no relationship found between node 'crc' and this object Dec 03 08:42:24 crc kubenswrapper[4576]: E1203 08:42:24.218498 4576 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver-operator\"/\"openshift-apiserver-operator-serving-cert\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"openshift-apiserver-operator-serving-cert\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-apiserver-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 03 08:42:24 crc kubenswrapper[4576]: E1203 08:42:24.218588 4576 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-api\"/\"machine-api-operator-dockercfg-mfbb7\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"machine-api-operator-dockercfg-mfbb7\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-machine-api\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 03 08:42:24 crc kubenswrapper[4576]: W1203 08:42:24.219295 4576 reflector.go:561] object-"openshift-machine-api"/"machine-api-operator-tls": failed to list *v1.Secret: secrets "machine-api-operator-tls" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-machine-api": no relationship found between node 'crc' and this object Dec 03 08:42:24 crc kubenswrapper[4576]: E1203 08:42:24.219348 4576 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-api\"/\"machine-api-operator-tls\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"machine-api-operator-tls\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-machine-api\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 03 08:42:24 crc kubenswrapper[4576]: W1203 08:42:24.219452 4576 reflector.go:561] object-"openshift-machine-api"/"machine-api-operator-images": failed to list *v1.ConfigMap: configmaps "machine-api-operator-images" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-api": no relationship found between node 'crc' and this object Dec 03 08:42:24 crc kubenswrapper[4576]: E1203 08:42:24.219469 4576 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-api\"/\"machine-api-operator-images\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"machine-api-operator-images\" is forbidden: User \"system:node:crc\" 
cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-api\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 03 08:42:24 crc kubenswrapper[4576]: W1203 08:42:24.219509 4576 reflector.go:561] object-"openshift-machine-api"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-api": no relationship found between node 'crc' and this object Dec 03 08:42:24 crc kubenswrapper[4576]: E1203 08:42:24.219546 4576 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-api\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-api\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.219609 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.222083 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-5jnbz"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.222935 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-22vzn"] Dec 03 08:42:24 crc kubenswrapper[4576]: W1203 08:42:24.223032 4576 reflector.go:561] object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config": failed to list *v1.ConfigMap: configmaps "openshift-apiserver-operator-config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-apiserver-operator": no relationship found between node 'crc' and this object Dec 03 08:42:24 crc kubenswrapper[4576]: E1203 08:42:24.223055 4576 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver-operator\"/\"openshift-apiserver-operator-config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-apiserver-operator-config\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-apiserver-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.223186 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.223291 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.223391 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.223604 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.224157 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 03 08:42:24 crc 
kubenswrapper[4576]: I1203 08:42:24.224394 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.224508 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-22vzn" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.225215 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5jnbz" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.224607 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.224670 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.224714 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.225025 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.226341 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.232306 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.232654 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.233175 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.246261 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.246657 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.247410 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.247558 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.248875 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.249095 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-hg98m"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.250328 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.265789 4576 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.266189 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.266354 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.266648 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.266689 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.266828 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.266655 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.267102 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.267281 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.267417 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.267587 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.267657 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.267860 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.267959 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.266457 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.268117 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.268261 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.269785 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.272974 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-6qg8m"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.273596 4576 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-console-operator/console-operator-58897d9998-qv4qp"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.273940 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tjv2m"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.274316 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.274399 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-6qg8m" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.274360 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tjv2m" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.274834 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-qv4qp" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.275930 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-76tfl\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.276036 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sjgq6\" (UniqueName: \"kubernetes.io/projected/c82788b8-bd1f-49c5-8f63-e8e360008f40-kube-api-access-sjgq6\") pod \"route-controller-manager-6576b87f9c-gnxs8\" (UID: \"c82788b8-bd1f-49c5-8f63-e8e360008f40\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gnxs8" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.276116 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7886546d-9341-4fcc-a861-d78d9fa35e98-etcd-client\") pod \"apiserver-7bbb656c7d-74x2q\" (UID: \"7886546d-9341-4fcc-a861-d78d9fa35e98\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-74x2q" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.276171 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.276187 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cdcaf314-ef4a-4f8e-90fb-a597bcd1f343-config\") pod \"controller-manager-879f6c89f-km29k\" (UID: \"cdcaf314-ef4a-4f8e-90fb-a597bcd1f343\") " pod="openshift-controller-manager/controller-manager-879f6c89f-km29k" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.276356 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-76tfl\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 
08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.276431 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/fe3569cb-a99a-4e1a-8b5f-124e68276330-audit-dir\") pod \"apiserver-76f77b778f-t7p55\" (UID: \"fe3569cb-a99a-4e1a-8b5f-124e68276330\") " pod="openshift-apiserver/apiserver-76f77b778f-t7p55" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.276507 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7886546d-9341-4fcc-a861-d78d9fa35e98-serving-cert\") pod \"apiserver-7bbb656c7d-74x2q\" (UID: \"7886546d-9341-4fcc-a861-d78d9fa35e98\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-74x2q" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.276593 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5rj2\" (UniqueName: \"kubernetes.io/projected/bedc7336-f16c-485b-8cc8-13eea705d68a-kube-api-access-f5rj2\") pod \"machine-api-operator-5694c8668f-nsf2s\" (UID: \"bedc7336-f16c-485b-8cc8-13eea705d68a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nsf2s" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.276687 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xsgn7\" (UniqueName: \"kubernetes.io/projected/5375d73e-a10f-4e11-a6a7-9ec01e8a60ea-kube-api-access-xsgn7\") pod \"authentication-operator-69f744f599-hhd5z\" (UID: \"5375d73e-a10f-4e11-a6a7-9ec01e8a60ea\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hhd5z" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.276750 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.276817 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c82788b8-bd1f-49c5-8f63-e8e360008f40-client-ca\") pod \"route-controller-manager-6576b87f9c-gnxs8\" (UID: \"c82788b8-bd1f-49c5-8f63-e8e360008f40\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gnxs8" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.276908 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7886546d-9341-4fcc-a861-d78d9fa35e98-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-74x2q\" (UID: \"7886546d-9341-4fcc-a861-d78d9fa35e98\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-74x2q" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.276988 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c212c1e9-bf50-4619-b4a0-7ae0659f7cab-auth-proxy-config\") pod \"machine-approver-56656f9798-5jnbz\" (UID: \"c212c1e9-bf50-4619-b4a0-7ae0659f7cab\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5jnbz" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.277062 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/098beae0-8e26-45b7-bf18-cf6e4c83d7c7-available-featuregates\") pod 
\"openshift-config-operator-7777fb866f-x5pdd\" (UID: \"098beae0-8e26-45b7-bf18-cf6e4c83d7c7\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-x5pdd" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.277136 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fp52h\" (UniqueName: \"kubernetes.io/projected/e82ee5bd-9dd0-4d09-a629-21a1d08dbb06-kube-api-access-fp52h\") pod \"openshift-apiserver-operator-796bbdcf4f-lskhl\" (UID: \"e82ee5bd-9dd0-4d09-a629-21a1d08dbb06\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lskhl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.277263 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7886546d-9341-4fcc-a861-d78d9fa35e98-audit-dir\") pod \"apiserver-7bbb656c7d-74x2q\" (UID: \"7886546d-9341-4fcc-a861-d78d9fa35e98\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-74x2q" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.277359 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c82788b8-bd1f-49c5-8f63-e8e360008f40-serving-cert\") pod \"route-controller-manager-6576b87f9c-gnxs8\" (UID: \"c82788b8-bd1f-49c5-8f63-e8e360008f40\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gnxs8" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.277437 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-28dzq\" (UniqueName: \"kubernetes.io/projected/098beae0-8e26-45b7-bf18-cf6e4c83d7c7-kube-api-access-28dzq\") pod \"openshift-config-operator-7777fb866f-x5pdd\" (UID: \"098beae0-8e26-45b7-bf18-cf6e4c83d7c7\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-x5pdd" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.277584 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/098beae0-8e26-45b7-bf18-cf6e4c83d7c7-serving-cert\") pod \"openshift-config-operator-7777fb866f-x5pdd\" (UID: \"098beae0-8e26-45b7-bf18-cf6e4c83d7c7\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-x5pdd" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.277683 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-klbff\" (UniqueName: \"kubernetes.io/projected/163752d1-e68c-43c0-b869-2d7755551a1d-kube-api-access-klbff\") pod \"dns-operator-744455d44c-6tzl7\" (UID: \"163752d1-e68c-43c0-b869-2d7755551a1d\") " pod="openshift-dns-operator/dns-operator-744455d44c-6tzl7" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.277756 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6swkq\" (UniqueName: \"kubernetes.io/projected/fe3569cb-a99a-4e1a-8b5f-124e68276330-kube-api-access-6swkq\") pod \"apiserver-76f77b778f-t7p55\" (UID: \"fe3569cb-a99a-4e1a-8b5f-124e68276330\") " pod="openshift-apiserver/apiserver-76f77b778f-t7p55" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.277841 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cdcaf314-ef4a-4f8e-90fb-a597bcd1f343-client-ca\") pod 
\"controller-manager-879f6c89f-km29k\" (UID: \"cdcaf314-ef4a-4f8e-90fb-a597bcd1f343\") " pod="openshift-controller-manager/controller-manager-879f6c89f-km29k" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.277918 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/fe3569cb-a99a-4e1a-8b5f-124e68276330-etcd-client\") pod \"apiserver-76f77b778f-t7p55\" (UID: \"fe3569cb-a99a-4e1a-8b5f-124e68276330\") " pod="openshift-apiserver/apiserver-76f77b778f-t7p55" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.278000 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e82ee5bd-9dd0-4d09-a629-21a1d08dbb06-config\") pod \"openshift-apiserver-operator-796bbdcf4f-lskhl\" (UID: \"e82ee5bd-9dd0-4d09-a629-21a1d08dbb06\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lskhl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.278069 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5375d73e-a10f-4e11-a6a7-9ec01e8a60ea-service-ca-bundle\") pod \"authentication-operator-69f744f599-hhd5z\" (UID: \"5375d73e-a10f-4e11-a6a7-9ec01e8a60ea\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hhd5z" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.278143 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-76tfl\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.278220 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a2991975-3b10-4f7d-aa48-750e3c402870-audit-dir\") pod \"oauth-openshift-558db77b4-76tfl\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.278306 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/fe3569cb-a99a-4e1a-8b5f-124e68276330-node-pullsecrets\") pod \"apiserver-76f77b778f-t7p55\" (UID: \"fe3569cb-a99a-4e1a-8b5f-124e68276330\") " pod="openshift-apiserver/apiserver-76f77b778f-t7p55" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.278368 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.278570 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.278739 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.278924 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 03 08:42:24 crc kubenswrapper[4576]: 
I1203 08:42:24.279457 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.280397 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.280564 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.280860 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.278374 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/fe3569cb-a99a-4e1a-8b5f-124e68276330-audit\") pod \"apiserver-76f77b778f-t7p55\" (UID: \"fe3569cb-a99a-4e1a-8b5f-124e68276330\") " pod="openshift-apiserver/apiserver-76f77b778f-t7p55" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.281002 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j95dr\" (UniqueName: \"kubernetes.io/projected/7886546d-9341-4fcc-a861-d78d9fa35e98-kube-api-access-j95dr\") pod \"apiserver-7bbb656c7d-74x2q\" (UID: \"7886546d-9341-4fcc-a861-d78d9fa35e98\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-74x2q" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.281038 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e82ee5bd-9dd0-4d09-a629-21a1d08dbb06-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-lskhl\" (UID: \"e82ee5bd-9dd0-4d09-a629-21a1d08dbb06\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lskhl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.281070 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cdcaf314-ef4a-4f8e-90fb-a597bcd1f343-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-km29k\" (UID: \"cdcaf314-ef4a-4f8e-90fb-a597bcd1f343\") " pod="openshift-controller-manager/controller-manager-879f6c89f-km29k" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.281093 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p28dj\" (UniqueName: \"kubernetes.io/projected/a2991975-3b10-4f7d-aa48-750e3c402870-kube-api-access-p28dj\") pod \"oauth-openshift-558db77b4-76tfl\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.281115 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/fe3569cb-a99a-4e1a-8b5f-124e68276330-etcd-serving-ca\") pod \"apiserver-76f77b778f-t7p55\" (UID: \"fe3569cb-a99a-4e1a-8b5f-124e68276330\") " pod="openshift-apiserver/apiserver-76f77b778f-t7p55" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.281158 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: 
\"kubernetes.io/configmap/bedc7336-f16c-485b-8cc8-13eea705d68a-images\") pod \"machine-api-operator-5694c8668f-nsf2s\" (UID: \"bedc7336-f16c-485b-8cc8-13eea705d68a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nsf2s" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.281181 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-76tfl\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.281209 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7886546d-9341-4fcc-a861-d78d9fa35e98-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-74x2q\" (UID: \"7886546d-9341-4fcc-a861-d78d9fa35e98\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-74x2q" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.281232 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5375d73e-a10f-4e11-a6a7-9ec01e8a60ea-config\") pod \"authentication-operator-69f744f599-hhd5z\" (UID: \"5375d73e-a10f-4e11-a6a7-9ec01e8a60ea\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hhd5z" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.281253 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-76tfl\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.281277 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7886546d-9341-4fcc-a861-d78d9fa35e98-encryption-config\") pod \"apiserver-7bbb656c7d-74x2q\" (UID: \"7886546d-9341-4fcc-a861-d78d9fa35e98\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-74x2q" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.281299 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5375d73e-a10f-4e11-a6a7-9ec01e8a60ea-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-hhd5z\" (UID: \"5375d73e-a10f-4e11-a6a7-9ec01e8a60ea\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hhd5z" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.281325 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-76tfl\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.281348 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-76tfl\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.281369 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fe3569cb-a99a-4e1a-8b5f-124e68276330-serving-cert\") pod \"apiserver-76f77b778f-t7p55\" (UID: \"fe3569cb-a99a-4e1a-8b5f-124e68276330\") " pod="openshift-apiserver/apiserver-76f77b778f-t7p55" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.281396 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-76tfl\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.281421 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6522b\" (UniqueName: \"kubernetes.io/projected/c212c1e9-bf50-4619-b4a0-7ae0659f7cab-kube-api-access-6522b\") pod \"machine-approver-56656f9798-5jnbz\" (UID: \"c212c1e9-bf50-4619-b4a0-7ae0659f7cab\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5jnbz" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.281452 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/163752d1-e68c-43c0-b869-2d7755551a1d-metrics-tls\") pod \"dns-operator-744455d44c-6tzl7\" (UID: \"163752d1-e68c-43c0-b869-2d7755551a1d\") " pod="openshift-dns-operator/dns-operator-744455d44c-6tzl7" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.281479 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-76tfl\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.281505 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7886546d-9341-4fcc-a861-d78d9fa35e98-audit-policies\") pod \"apiserver-7bbb656c7d-74x2q\" (UID: \"7886546d-9341-4fcc-a861-d78d9fa35e98\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-74x2q" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.283273 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-76tfl\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.283330 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fe3569cb-a99a-4e1a-8b5f-124e68276330-trusted-ca-bundle\") pod \"apiserver-76f77b778f-t7p55\" (UID: \"fe3569cb-a99a-4e1a-8b5f-124e68276330\") " pod="openshift-apiserver/apiserver-76f77b778f-t7p55" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.283378 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/c57d4112-6506-4f4c-86c2-c6d1249df640-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-22vzn\" (UID: \"c57d4112-6506-4f4c-86c2-c6d1249df640\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-22vzn" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.283410 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9dtfm\" (UniqueName: \"kubernetes.io/projected/cdcaf314-ef4a-4f8e-90fb-a597bcd1f343-kube-api-access-9dtfm\") pod \"controller-manager-879f6c89f-km29k\" (UID: \"cdcaf314-ef4a-4f8e-90fb-a597bcd1f343\") " pod="openshift-controller-manager/controller-manager-879f6c89f-km29k" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.283430 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a2991975-3b10-4f7d-aa48-750e3c402870-audit-policies\") pod \"oauth-openshift-558db77b4-76tfl\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.283458 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bedc7336-f16c-485b-8cc8-13eea705d68a-config\") pod \"machine-api-operator-5694c8668f-nsf2s\" (UID: \"bedc7336-f16c-485b-8cc8-13eea705d68a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nsf2s" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.283484 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/fe3569cb-a99a-4e1a-8b5f-124e68276330-image-import-ca\") pod \"apiserver-76f77b778f-t7p55\" (UID: \"fe3569cb-a99a-4e1a-8b5f-124e68276330\") " pod="openshift-apiserver/apiserver-76f77b778f-t7p55" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.283504 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c82788b8-bd1f-49c5-8f63-e8e360008f40-config\") pod \"route-controller-manager-6576b87f9c-gnxs8\" (UID: \"c82788b8-bd1f-49c5-8f63-e8e360008f40\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gnxs8" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.283544 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ntqmc\" (UniqueName: \"kubernetes.io/projected/c57d4112-6506-4f4c-86c2-c6d1249df640-kube-api-access-ntqmc\") pod \"cluster-samples-operator-665b6dd947-22vzn\" (UID: \"c57d4112-6506-4f4c-86c2-c6d1249df640\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-22vzn" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.283568 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" 
(UniqueName: \"kubernetes.io/secret/c212c1e9-bf50-4619-b4a0-7ae0659f7cab-machine-approver-tls\") pod \"machine-approver-56656f9798-5jnbz\" (UID: \"c212c1e9-bf50-4619-b4a0-7ae0659f7cab\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5jnbz" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.283591 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c212c1e9-bf50-4619-b4a0-7ae0659f7cab-config\") pod \"machine-approver-56656f9798-5jnbz\" (UID: \"c212c1e9-bf50-4619-b4a0-7ae0659f7cab\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5jnbz" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.283614 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/bedc7336-f16c-485b-8cc8-13eea705d68a-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-nsf2s\" (UID: \"bedc7336-f16c-485b-8cc8-13eea705d68a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nsf2s" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.283655 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cdcaf314-ef4a-4f8e-90fb-a597bcd1f343-serving-cert\") pod \"controller-manager-879f6c89f-km29k\" (UID: \"cdcaf314-ef4a-4f8e-90fb-a597bcd1f343\") " pod="openshift-controller-manager/controller-manager-879f6c89f-km29k" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.283777 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-76tfl\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.283822 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe3569cb-a99a-4e1a-8b5f-124e68276330-config\") pod \"apiserver-76f77b778f-t7p55\" (UID: \"fe3569cb-a99a-4e1a-8b5f-124e68276330\") " pod="openshift-apiserver/apiserver-76f77b778f-t7p55" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.283885 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5375d73e-a10f-4e11-a6a7-9ec01e8a60ea-serving-cert\") pod \"authentication-operator-69f744f599-hhd5z\" (UID: \"5375d73e-a10f-4e11-a6a7-9ec01e8a60ea\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hhd5z" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.283936 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fpnwv\" (UniqueName: \"kubernetes.io/projected/f9d3808c-11ef-421d-83e6-b909679c5490-kube-api-access-fpnwv\") pod \"downloads-7954f5f757-l27nx\" (UID: \"f9d3808c-11ef-421d-83e6-b909679c5490\") " pod="openshift-console/downloads-7954f5f757-l27nx" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.283983 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: 
\"kubernetes.io/secret/fe3569cb-a99a-4e1a-8b5f-124e68276330-encryption-config\") pod \"apiserver-76f77b778f-t7p55\" (UID: \"fe3569cb-a99a-4e1a-8b5f-124e68276330\") " pod="openshift-apiserver/apiserver-76f77b778f-t7p55" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.284012 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8xz88"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.292195 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8xz88" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.293696 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.293877 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.296486 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.296649 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.297923 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2v8gh"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.298363 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2v8gh" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.298580 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.298747 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.299409 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.299511 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.299669 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.300556 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.300965 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.301096 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.301493 4576 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-console-operator"/"serving-cert" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.301943 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.314008 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.314477 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-jqrv8"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.315198 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-h7ncw"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.315683 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-h7ncw" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.315948 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jqrv8" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.316203 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-q56k8"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.318199 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.318823 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.334632 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.335483 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.340270 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-9nt6z"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.342965 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cwfc8"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.356924 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.357635 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.357923 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.359317 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.359839 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.360189 4576 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-q56k8" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.366391 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-9nt6z" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.367699 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kbnz6"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.368093 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-sjsrz"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.368305 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kbnz6" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.368496 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cwfc8" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.368820 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-sjsrz" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.371865 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.372084 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-v4pcv"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.372946 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-88mqt"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.373641 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-v4pcv" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.373806 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-88mqt" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.375757 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xbbn2"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.375932 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.380619 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xbbn2" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.382700 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9825w"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.383772 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9825w" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.384443 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a2991975-3b10-4f7d-aa48-750e3c402870-audit-policies\") pod \"oauth-openshift-558db77b4-76tfl\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.388017 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/2e949c9f-48d1-4da5-90a2-21e446f64020-etcd-ca\") pod \"etcd-operator-b45778765-6qg8m\" (UID: \"2e949c9f-48d1-4da5-90a2-21e446f64020\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6qg8m" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.388123 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/c57d4112-6506-4f4c-86c2-c6d1249df640-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-22vzn\" (UID: \"c57d4112-6506-4f4c-86c2-c6d1249df640\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-22vzn" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.388313 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9dtfm\" (UniqueName: \"kubernetes.io/projected/cdcaf314-ef4a-4f8e-90fb-a597bcd1f343-kube-api-access-9dtfm\") pod \"controller-manager-879f6c89f-km29k\" (UID: \"cdcaf314-ef4a-4f8e-90fb-a597bcd1f343\") " pod="openshift-controller-manager/controller-manager-879f6c89f-km29k" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.388407 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/fe3569cb-a99a-4e1a-8b5f-124e68276330-image-import-ca\") pod \"apiserver-76f77b778f-t7p55\" (UID: \"fe3569cb-a99a-4e1a-8b5f-124e68276330\") " pod="openshift-apiserver/apiserver-76f77b778f-t7p55" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.388484 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c82788b8-bd1f-49c5-8f63-e8e360008f40-config\") pod \"route-controller-manager-6576b87f9c-gnxs8\" (UID: \"c82788b8-bd1f-49c5-8f63-e8e360008f40\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gnxs8" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.388632 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9444d9cb-fe03-4f32-ad40-84901ecfb0d9-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-q56k8\" (UID: \"9444d9cb-fe03-4f32-ad40-84901ecfb0d9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-q56k8" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.388731 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bedc7336-f16c-485b-8cc8-13eea705d68a-config\") pod \"machine-api-operator-5694c8668f-nsf2s\" (UID: \"bedc7336-f16c-485b-8cc8-13eea705d68a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nsf2s" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 
08:42:24.388894 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c212c1e9-bf50-4619-b4a0-7ae0659f7cab-config\") pod \"machine-approver-56656f9798-5jnbz\" (UID: \"c212c1e9-bf50-4619-b4a0-7ae0659f7cab\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5jnbz" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.388981 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/bedc7336-f16c-485b-8cc8-13eea705d68a-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-nsf2s\" (UID: \"bedc7336-f16c-485b-8cc8-13eea705d68a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nsf2s" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.386103 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a2991975-3b10-4f7d-aa48-750e3c402870-audit-policies\") pod \"oauth-openshift-558db77b4-76tfl\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.389101 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ntqmc\" (UniqueName: \"kubernetes.io/projected/c57d4112-6506-4f4c-86c2-c6d1249df640-kube-api-access-ntqmc\") pod \"cluster-samples-operator-665b6dd947-22vzn\" (UID: \"c57d4112-6506-4f4c-86c2-c6d1249df640\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-22vzn" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.389380 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/c212c1e9-bf50-4619-b4a0-7ae0659f7cab-machine-approver-tls\") pod \"machine-approver-56656f9798-5jnbz\" (UID: \"c212c1e9-bf50-4619-b4a0-7ae0659f7cab\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5jnbz" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.389477 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cdcaf314-ef4a-4f8e-90fb-a597bcd1f343-serving-cert\") pod \"controller-manager-879f6c89f-km29k\" (UID: \"cdcaf314-ef4a-4f8e-90fb-a597bcd1f343\") " pod="openshift-controller-manager/controller-manager-879f6c89f-km29k" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.389568 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-76tfl\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.389652 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe3569cb-a99a-4e1a-8b5f-124e68276330-config\") pod \"apiserver-76f77b778f-t7p55\" (UID: \"fe3569cb-a99a-4e1a-8b5f-124e68276330\") " pod="openshift-apiserver/apiserver-76f77b778f-t7p55" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.389797 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/711bed39-85e0-418e-b746-659fef7c4b49-trusted-ca\") pod \"ingress-operator-5b745b69d9-jqrv8\" (UID: \"711bed39-85e0-418e-b746-659fef7c4b49\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jqrv8" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.389873 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpnwv\" (UniqueName: \"kubernetes.io/projected/f9d3808c-11ef-421d-83e6-b909679c5490-kube-api-access-fpnwv\") pod \"downloads-7954f5f757-l27nx\" (UID: \"f9d3808c-11ef-421d-83e6-b909679c5490\") " pod="openshift-console/downloads-7954f5f757-l27nx" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.389953 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5375d73e-a10f-4e11-a6a7-9ec01e8a60ea-serving-cert\") pod \"authentication-operator-69f744f599-hhd5z\" (UID: \"5375d73e-a10f-4e11-a6a7-9ec01e8a60ea\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hhd5z" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.390028 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/fe3569cb-a99a-4e1a-8b5f-124e68276330-encryption-config\") pod \"apiserver-76f77b778f-t7p55\" (UID: \"fe3569cb-a99a-4e1a-8b5f-124e68276330\") " pod="openshift-apiserver/apiserver-76f77b778f-t7p55" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.390108 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/2e949c9f-48d1-4da5-90a2-21e446f64020-etcd-client\") pod \"etcd-operator-b45778765-6qg8m\" (UID: \"2e949c9f-48d1-4da5-90a2-21e446f64020\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6qg8m" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.390180 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9444d9cb-fe03-4f32-ad40-84901ecfb0d9-config\") pod \"kube-controller-manager-operator-78b949d7b-q56k8\" (UID: \"9444d9cb-fe03-4f32-ad40-84901ecfb0d9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-q56k8" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.390274 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mzxps\" (UniqueName: \"kubernetes.io/projected/3cb91673-2622-41a5-91d3-a79e0ba7289b-kube-api-access-mzxps\") pod \"console-f9d7485db-h7ncw\" (UID: \"3cb91673-2622-41a5-91d3-a79e0ba7289b\") " pod="openshift-console/console-f9d7485db-h7ncw" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.390354 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-76tfl\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.390426 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cdcaf314-ef4a-4f8e-90fb-a597bcd1f343-config\") pod \"controller-manager-879f6c89f-km29k\" (UID: \"cdcaf314-ef4a-4f8e-90fb-a597bcd1f343\") " 
pod="openshift-controller-manager/controller-manager-879f6c89f-km29k" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.390556 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-76tfl\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.390631 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/fe3569cb-a99a-4e1a-8b5f-124e68276330-audit-dir\") pod \"apiserver-76f77b778f-t7p55\" (UID: \"fe3569cb-a99a-4e1a-8b5f-124e68276330\") " pod="openshift-apiserver/apiserver-76f77b778f-t7p55" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.390789 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sjgq6\" (UniqueName: \"kubernetes.io/projected/c82788b8-bd1f-49c5-8f63-e8e360008f40-kube-api-access-sjgq6\") pod \"route-controller-manager-6576b87f9c-gnxs8\" (UID: \"c82788b8-bd1f-49c5-8f63-e8e360008f40\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gnxs8" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.390865 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7886546d-9341-4fcc-a861-d78d9fa35e98-etcd-client\") pod \"apiserver-7bbb656c7d-74x2q\" (UID: \"7886546d-9341-4fcc-a861-d78d9fa35e98\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-74x2q" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.390938 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xsgn7\" (UniqueName: \"kubernetes.io/projected/5375d73e-a10f-4e11-a6a7-9ec01e8a60ea-kube-api-access-xsgn7\") pod \"authentication-operator-69f744f599-hhd5z\" (UID: \"5375d73e-a10f-4e11-a6a7-9ec01e8a60ea\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hhd5z" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.391013 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c82788b8-bd1f-49c5-8f63-e8e360008f40-client-ca\") pod \"route-controller-manager-6576b87f9c-gnxs8\" (UID: \"c82788b8-bd1f-49c5-8f63-e8e360008f40\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gnxs8" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.391087 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4ddf4f4a-13ff-4d6d-82e8-4e509e1d293a-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-8xz88\" (UID: \"4ddf4f4a-13ff-4d6d-82e8-4e509e1d293a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8xz88" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.391161 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7886546d-9341-4fcc-a861-d78d9fa35e98-serving-cert\") pod \"apiserver-7bbb656c7d-74x2q\" (UID: \"7886546d-9341-4fcc-a861-d78d9fa35e98\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-74x2q" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.391234 4576 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f5rj2\" (UniqueName: \"kubernetes.io/projected/bedc7336-f16c-485b-8cc8-13eea705d68a-kube-api-access-f5rj2\") pod \"machine-api-operator-5694c8668f-nsf2s\" (UID: \"bedc7336-f16c-485b-8cc8-13eea705d68a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nsf2s" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.391308 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/098beae0-8e26-45b7-bf18-cf6e4c83d7c7-available-featuregates\") pod \"openshift-config-operator-7777fb866f-x5pdd\" (UID: \"098beae0-8e26-45b7-bf18-cf6e4c83d7c7\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-x5pdd" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.391381 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fp52h\" (UniqueName: \"kubernetes.io/projected/e82ee5bd-9dd0-4d09-a629-21a1d08dbb06-kube-api-access-fp52h\") pod \"openshift-apiserver-operator-796bbdcf4f-lskhl\" (UID: \"e82ee5bd-9dd0-4d09-a629-21a1d08dbb06\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lskhl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.391483 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/4ddf4f4a-13ff-4d6d-82e8-4e509e1d293a-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-8xz88\" (UID: \"4ddf4f4a-13ff-4d6d-82e8-4e509e1d293a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8xz88" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.391580 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7886546d-9341-4fcc-a861-d78d9fa35e98-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-74x2q\" (UID: \"7886546d-9341-4fcc-a861-d78d9fa35e98\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-74x2q" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.391668 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c212c1e9-bf50-4619-b4a0-7ae0659f7cab-auth-proxy-config\") pod \"machine-approver-56656f9798-5jnbz\" (UID: \"c212c1e9-bf50-4619-b4a0-7ae0659f7cab\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5jnbz" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.391748 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7886546d-9341-4fcc-a861-d78d9fa35e98-audit-dir\") pod \"apiserver-7bbb656c7d-74x2q\" (UID: \"7886546d-9341-4fcc-a861-d78d9fa35e98\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-74x2q" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.391877 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3cb91673-2622-41a5-91d3-a79e0ba7289b-service-ca\") pod \"console-f9d7485db-h7ncw\" (UID: \"3cb91673-2622-41a5-91d3-a79e0ba7289b\") " pod="openshift-console/console-f9d7485db-h7ncw" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.391951 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/c82788b8-bd1f-49c5-8f63-e8e360008f40-serving-cert\") pod \"route-controller-manager-6576b87f9c-gnxs8\" (UID: \"c82788b8-bd1f-49c5-8f63-e8e360008f40\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gnxs8" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.392018 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/711bed39-85e0-418e-b746-659fef7c4b49-metrics-tls\") pod \"ingress-operator-5b745b69d9-jqrv8\" (UID: \"711bed39-85e0-418e-b746-659fef7c4b49\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jqrv8" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.392098 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/098beae0-8e26-45b7-bf18-cf6e4c83d7c7-serving-cert\") pod \"openshift-config-operator-7777fb866f-x5pdd\" (UID: \"098beae0-8e26-45b7-bf18-cf6e4c83d7c7\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-x5pdd" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.392185 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-28dzq\" (UniqueName: \"kubernetes.io/projected/098beae0-8e26-45b7-bf18-cf6e4c83d7c7-kube-api-access-28dzq\") pod \"openshift-config-operator-7777fb866f-x5pdd\" (UID: \"098beae0-8e26-45b7-bf18-cf6e4c83d7c7\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-x5pdd" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.392284 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6swkq\" (UniqueName: \"kubernetes.io/projected/fe3569cb-a99a-4e1a-8b5f-124e68276330-kube-api-access-6swkq\") pod \"apiserver-76f77b778f-t7p55\" (UID: \"fe3569cb-a99a-4e1a-8b5f-124e68276330\") " pod="openshift-apiserver/apiserver-76f77b778f-t7p55" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.392361 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-klbff\" (UniqueName: \"kubernetes.io/projected/163752d1-e68c-43c0-b869-2d7755551a1d-kube-api-access-klbff\") pod \"dns-operator-744455d44c-6tzl7\" (UID: \"163752d1-e68c-43c0-b869-2d7755551a1d\") " pod="openshift-dns-operator/dns-operator-744455d44c-6tzl7" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.392435 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/711bed39-85e0-418e-b746-659fef7c4b49-bound-sa-token\") pod \"ingress-operator-5b745b69d9-jqrv8\" (UID: \"711bed39-85e0-418e-b746-659fef7c4b49\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jqrv8" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.392514 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2e949c9f-48d1-4da5-90a2-21e446f64020-config\") pod \"etcd-operator-b45778765-6qg8m\" (UID: \"2e949c9f-48d1-4da5-90a2-21e446f64020\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6qg8m" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.392627 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/2e949c9f-48d1-4da5-90a2-21e446f64020-etcd-service-ca\") pod \"etcd-operator-b45778765-6qg8m\" (UID: 
\"2e949c9f-48d1-4da5-90a2-21e446f64020\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6qg8m" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.392701 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cdcaf314-ef4a-4f8e-90fb-a597bcd1f343-client-ca\") pod \"controller-manager-879f6c89f-km29k\" (UID: \"cdcaf314-ef4a-4f8e-90fb-a597bcd1f343\") " pod="openshift-controller-manager/controller-manager-879f6c89f-km29k" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.392772 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/fe3569cb-a99a-4e1a-8b5f-124e68276330-etcd-client\") pod \"apiserver-76f77b778f-t7p55\" (UID: \"fe3569cb-a99a-4e1a-8b5f-124e68276330\") " pod="openshift-apiserver/apiserver-76f77b778f-t7p55" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.392844 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3cb91673-2622-41a5-91d3-a79e0ba7289b-console-config\") pod \"console-f9d7485db-h7ncw\" (UID: \"3cb91673-2622-41a5-91d3-a79e0ba7289b\") " pod="openshift-console/console-f9d7485db-h7ncw" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.392920 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e82ee5bd-9dd0-4d09-a629-21a1d08dbb06-config\") pod \"openshift-apiserver-operator-796bbdcf4f-lskhl\" (UID: \"e82ee5bd-9dd0-4d09-a629-21a1d08dbb06\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lskhl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.393067 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5375d73e-a10f-4e11-a6a7-9ec01e8a60ea-service-ca-bundle\") pod \"authentication-operator-69f744f599-hhd5z\" (UID: \"5375d73e-a10f-4e11-a6a7-9ec01e8a60ea\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hhd5z" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.393140 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-76tfl\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.393387 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/fe3569cb-a99a-4e1a-8b5f-124e68276330-audit\") pod \"apiserver-76f77b778f-t7p55\" (UID: \"fe3569cb-a99a-4e1a-8b5f-124e68276330\") " pod="openshift-apiserver/apiserver-76f77b778f-t7p55" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.393462 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/598b95f5-9016-454b-be5b-4b9a4145479b-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-tjv2m\" (UID: \"598b95f5-9016-454b-be5b-4b9a4145479b\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tjv2m" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.393561 4576 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a2991975-3b10-4f7d-aa48-750e3c402870-audit-dir\") pod \"oauth-openshift-558db77b4-76tfl\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.393637 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/fe3569cb-a99a-4e1a-8b5f-124e68276330-node-pullsecrets\") pod \"apiserver-76f77b778f-t7p55\" (UID: \"fe3569cb-a99a-4e1a-8b5f-124e68276330\") " pod="openshift-apiserver/apiserver-76f77b778f-t7p55" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.393711 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j95dr\" (UniqueName: \"kubernetes.io/projected/7886546d-9341-4fcc-a861-d78d9fa35e98-kube-api-access-j95dr\") pod \"apiserver-7bbb656c7d-74x2q\" (UID: \"7886546d-9341-4fcc-a861-d78d9fa35e98\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-74x2q" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.393782 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e82ee5bd-9dd0-4d09-a629-21a1d08dbb06-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-lskhl\" (UID: \"e82ee5bd-9dd0-4d09-a629-21a1d08dbb06\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lskhl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.393855 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rdjm5\" (UniqueName: \"kubernetes.io/projected/711bed39-85e0-418e-b746-659fef7c4b49-kube-api-access-rdjm5\") pod \"ingress-operator-5b745b69d9-jqrv8\" (UID: \"711bed39-85e0-418e-b746-659fef7c4b49\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jqrv8" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.393928 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cdcaf314-ef4a-4f8e-90fb-a597bcd1f343-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-km29k\" (UID: \"cdcaf314-ef4a-4f8e-90fb-a597bcd1f343\") " pod="openshift-controller-manager/controller-manager-879f6c89f-km29k" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.394003 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p28dj\" (UniqueName: \"kubernetes.io/projected/a2991975-3b10-4f7d-aa48-750e3c402870-kube-api-access-p28dj\") pod \"oauth-openshift-558db77b4-76tfl\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.394071 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/bedc7336-f16c-485b-8cc8-13eea705d68a-images\") pod \"machine-api-operator-5694c8668f-nsf2s\" (UID: \"bedc7336-f16c-485b-8cc8-13eea705d68a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nsf2s" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.394148 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: 
\"kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-76tfl\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.394225 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/fe3569cb-a99a-4e1a-8b5f-124e68276330-etcd-serving-ca\") pod \"apiserver-76f77b778f-t7p55\" (UID: \"fe3569cb-a99a-4e1a-8b5f-124e68276330\") " pod="openshift-apiserver/apiserver-76f77b778f-t7p55" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.394305 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-phxss\" (UniqueName: \"kubernetes.io/projected/598b95f5-9016-454b-be5b-4b9a4145479b-kube-api-access-phxss\") pod \"openshift-controller-manager-operator-756b6f6bc6-tjv2m\" (UID: \"598b95f5-9016-454b-be5b-4b9a4145479b\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tjv2m" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.394391 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4ddf4f4a-13ff-4d6d-82e8-4e509e1d293a-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-8xz88\" (UID: \"4ddf4f4a-13ff-4d6d-82e8-4e509e1d293a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8xz88" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.394501 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7886546d-9341-4fcc-a861-d78d9fa35e98-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-74x2q\" (UID: \"7886546d-9341-4fcc-a861-d78d9fa35e98\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-74x2q" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.394604 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5375d73e-a10f-4e11-a6a7-9ec01e8a60ea-config\") pod \"authentication-operator-69f744f599-hhd5z\" (UID: \"5375d73e-a10f-4e11-a6a7-9ec01e8a60ea\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hhd5z" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.394679 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2e949c9f-48d1-4da5-90a2-21e446f64020-serving-cert\") pod \"etcd-operator-b45778765-6qg8m\" (UID: \"2e949c9f-48d1-4da5-90a2-21e446f64020\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6qg8m" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.394750 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3cb91673-2622-41a5-91d3-a79e0ba7289b-trusted-ca-bundle\") pod \"console-f9d7485db-h7ncw\" (UID: \"3cb91673-2622-41a5-91d3-a79e0ba7289b\") " pod="openshift-console/console-f9d7485db-h7ncw" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.394828 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5375d73e-a10f-4e11-a6a7-9ec01e8a60ea-trusted-ca-bundle\") pod 
\"authentication-operator-69f744f599-hhd5z\" (UID: \"5375d73e-a10f-4e11-a6a7-9ec01e8a60ea\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hhd5z" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.394903 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-76tfl\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.394982 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-76tfl\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.395049 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-76tfl\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.395116 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7886546d-9341-4fcc-a861-d78d9fa35e98-encryption-config\") pod \"apiserver-7bbb656c7d-74x2q\" (UID: \"7886546d-9341-4fcc-a861-d78d9fa35e98\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-74x2q" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.395186 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fe3569cb-a99a-4e1a-8b5f-124e68276330-serving-cert\") pod \"apiserver-76f77b778f-t7p55\" (UID: \"fe3569cb-a99a-4e1a-8b5f-124e68276330\") " pod="openshift-apiserver/apiserver-76f77b778f-t7p55" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.395258 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3cb91673-2622-41a5-91d3-a79e0ba7289b-console-oauth-config\") pod \"console-f9d7485db-h7ncw\" (UID: \"3cb91673-2622-41a5-91d3-a79e0ba7289b\") " pod="openshift-console/console-f9d7485db-h7ncw" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.395334 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-76tfl\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.395407 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6522b\" (UniqueName: \"kubernetes.io/projected/c212c1e9-bf50-4619-b4a0-7ae0659f7cab-kube-api-access-6522b\") pod \"machine-approver-56656f9798-5jnbz\" (UID: \"c212c1e9-bf50-4619-b4a0-7ae0659f7cab\") " 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5jnbz" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.395480 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njs64\" (UniqueName: \"kubernetes.io/projected/2e949c9f-48d1-4da5-90a2-21e446f64020-kube-api-access-njs64\") pod \"etcd-operator-b45778765-6qg8m\" (UID: \"2e949c9f-48d1-4da5-90a2-21e446f64020\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6qg8m" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.395561 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/598b95f5-9016-454b-be5b-4b9a4145479b-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-tjv2m\" (UID: \"598b95f5-9016-454b-be5b-4b9a4145479b\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tjv2m" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.407820 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3cb91673-2622-41a5-91d3-a79e0ba7289b-oauth-serving-cert\") pod \"console-f9d7485db-h7ncw\" (UID: \"3cb91673-2622-41a5-91d3-a79e0ba7289b\") " pod="openshift-console/console-f9d7485db-h7ncw" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.407981 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/163752d1-e68c-43c0-b869-2d7755551a1d-metrics-tls\") pod \"dns-operator-744455d44c-6tzl7\" (UID: \"163752d1-e68c-43c0-b869-2d7755551a1d\") " pod="openshift-dns-operator/dns-operator-744455d44c-6tzl7" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.408061 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-76tfl\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.408342 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w2wfz\" (UniqueName: \"kubernetes.io/projected/4ddf4f4a-13ff-4d6d-82e8-4e509e1d293a-kube-api-access-w2wfz\") pod \"cluster-image-registry-operator-dc59b4c8b-8xz88\" (UID: \"4ddf4f4a-13ff-4d6d-82e8-4e509e1d293a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8xz88" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.408431 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3cb91673-2622-41a5-91d3-a79e0ba7289b-console-serving-cert\") pod \"console-f9d7485db-h7ncw\" (UID: \"3cb91673-2622-41a5-91d3-a79e0ba7289b\") " pod="openshift-console/console-f9d7485db-h7ncw" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.408516 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7886546d-9341-4fcc-a861-d78d9fa35e98-audit-policies\") pod \"apiserver-7bbb656c7d-74x2q\" (UID: \"7886546d-9341-4fcc-a861-d78d9fa35e98\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-74x2q" 
Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.408608 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-76tfl\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.408676 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fe3569cb-a99a-4e1a-8b5f-124e68276330-trusted-ca-bundle\") pod \"apiserver-76f77b778f-t7p55\" (UID: \"fe3569cb-a99a-4e1a-8b5f-124e68276330\") " pod="openshift-apiserver/apiserver-76f77b778f-t7p55" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.408767 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9444d9cb-fe03-4f32-ad40-84901ecfb0d9-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-q56k8\" (UID: \"9444d9cb-fe03-4f32-ad40-84901ecfb0d9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-q56k8" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.409311 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/fe3569cb-a99a-4e1a-8b5f-124e68276330-etcd-serving-ca\") pod \"apiserver-76f77b778f-t7p55\" (UID: \"fe3569cb-a99a-4e1a-8b5f-124e68276330\") " pod="openshift-apiserver/apiserver-76f77b778f-t7p55" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.394421 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c212c1e9-bf50-4619-b4a0-7ae0659f7cab-config\") pod \"machine-approver-56656f9798-5jnbz\" (UID: \"c212c1e9-bf50-4619-b4a0-7ae0659f7cab\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5jnbz" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.409972 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/098beae0-8e26-45b7-bf18-cf6e4c83d7c7-available-featuregates\") pod \"openshift-config-operator-7777fb866f-x5pdd\" (UID: \"098beae0-8e26-45b7-bf18-cf6e4c83d7c7\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-x5pdd" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.410591 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7886546d-9341-4fcc-a861-d78d9fa35e98-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-74x2q\" (UID: \"7886546d-9341-4fcc-a861-d78d9fa35e98\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-74x2q" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.411068 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c212c1e9-bf50-4619-b4a0-7ae0659f7cab-auth-proxy-config\") pod \"machine-approver-56656f9798-5jnbz\" (UID: \"c212c1e9-bf50-4619-b4a0-7ae0659f7cab\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5jnbz" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.411115 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: 
\"kubernetes.io/host-path/7886546d-9341-4fcc-a861-d78d9fa35e98-audit-dir\") pod \"apiserver-7bbb656c7d-74x2q\" (UID: \"7886546d-9341-4fcc-a861-d78d9fa35e98\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-74x2q" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.411669 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7886546d-9341-4fcc-a861-d78d9fa35e98-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-74x2q\" (UID: \"7886546d-9341-4fcc-a861-d78d9fa35e98\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-74x2q" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.412210 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5375d73e-a10f-4e11-a6a7-9ec01e8a60ea-config\") pod \"authentication-operator-69f744f599-hhd5z\" (UID: \"5375d73e-a10f-4e11-a6a7-9ec01e8a60ea\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hhd5z" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.412856 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe3569cb-a99a-4e1a-8b5f-124e68276330-config\") pod \"apiserver-76f77b778f-t7p55\" (UID: \"fe3569cb-a99a-4e1a-8b5f-124e68276330\") " pod="openshift-apiserver/apiserver-76f77b778f-t7p55" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.412865 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7886546d-9341-4fcc-a861-d78d9fa35e98-etcd-client\") pod \"apiserver-7bbb656c7d-74x2q\" (UID: \"7886546d-9341-4fcc-a861-d78d9fa35e98\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-74x2q" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.389656 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/fe3569cb-a99a-4e1a-8b5f-124e68276330-image-import-ca\") pod \"apiserver-76f77b778f-t7p55\" (UID: \"fe3569cb-a99a-4e1a-8b5f-124e68276330\") " pod="openshift-apiserver/apiserver-76f77b778f-t7p55" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.413314 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-76tfl\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.413559 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c82788b8-bd1f-49c5-8f63-e8e360008f40-client-ca\") pod \"route-controller-manager-6576b87f9c-gnxs8\" (UID: \"c82788b8-bd1f-49c5-8f63-e8e360008f40\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gnxs8" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.414092 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5375d73e-a10f-4e11-a6a7-9ec01e8a60ea-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-hhd5z\" (UID: \"5375d73e-a10f-4e11-a6a7-9ec01e8a60ea\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hhd5z" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.421028 4576 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cdcaf314-ef4a-4f8e-90fb-a597bcd1f343-config\") pod \"controller-manager-879f6c89f-km29k\" (UID: \"cdcaf314-ef4a-4f8e-90fb-a597bcd1f343\") " pod="openshift-controller-manager/controller-manager-879f6c89f-km29k" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.421415 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7886546d-9341-4fcc-a861-d78d9fa35e98-serving-cert\") pod \"apiserver-7bbb656c7d-74x2q\" (UID: \"7886546d-9341-4fcc-a861-d78d9fa35e98\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-74x2q" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.421609 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-76tfl\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.423482 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-76tfl\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.424254 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5375d73e-a10f-4e11-a6a7-9ec01e8a60ea-serving-cert\") pod \"authentication-operator-69f744f599-hhd5z\" (UID: \"5375d73e-a10f-4e11-a6a7-9ec01e8a60ea\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hhd5z" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.424600 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.425650 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-76tfl\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.426687 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c82788b8-bd1f-49c5-8f63-e8e360008f40-serving-cert\") pod \"route-controller-manager-6576b87f9c-gnxs8\" (UID: \"c82788b8-bd1f-49c5-8f63-e8e360008f40\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gnxs8" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.427210 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cdcaf314-ef4a-4f8e-90fb-a597bcd1f343-serving-cert\") pod \"controller-manager-879f6c89f-km29k\" (UID: \"cdcaf314-ef4a-4f8e-90fb-a597bcd1f343\") " pod="openshift-controller-manager/controller-manager-879f6c89f-km29k" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.392098 4576 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-76tfl\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.427577 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/fe3569cb-a99a-4e1a-8b5f-124e68276330-audit-dir\") pod \"apiserver-76f77b778f-t7p55\" (UID: \"fe3569cb-a99a-4e1a-8b5f-124e68276330\") " pod="openshift-apiserver/apiserver-76f77b778f-t7p55" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.429034 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cdcaf314-ef4a-4f8e-90fb-a597bcd1f343-client-ca\") pod \"controller-manager-879f6c89f-km29k\" (UID: \"cdcaf314-ef4a-4f8e-90fb-a597bcd1f343\") " pod="openshift-controller-manager/controller-manager-879f6c89f-km29k" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.429405 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-76tfl\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.406106 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/fe3569cb-a99a-4e1a-8b5f-124e68276330-node-pullsecrets\") pod \"apiserver-76f77b778f-t7p55\" (UID: \"fe3569cb-a99a-4e1a-8b5f-124e68276330\") " pod="openshift-apiserver/apiserver-76f77b778f-t7p55" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.402585 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/c57d4112-6506-4f4c-86c2-c6d1249df640-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-22vzn\" (UID: \"c57d4112-6506-4f4c-86c2-c6d1249df640\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-22vzn" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.403188 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/fe3569cb-a99a-4e1a-8b5f-124e68276330-audit\") pod \"apiserver-76f77b778f-t7p55\" (UID: \"fe3569cb-a99a-4e1a-8b5f-124e68276330\") " pod="openshift-apiserver/apiserver-76f77b778f-t7p55" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.406053 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a2991975-3b10-4f7d-aa48-750e3c402870-audit-dir\") pod \"oauth-openshift-558db77b4-76tfl\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.393607 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-hhd5z"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.430498 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-glrgq"] Dec 03 
08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.431088 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-t7p55"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.431110 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-lcjhm"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.431491 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-8rpwp"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.431496 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/098beae0-8e26-45b7-bf18-cf6e4c83d7c7-serving-cert\") pod \"openshift-config-operator-7777fb866f-x5pdd\" (UID: \"098beae0-8e26-45b7-bf18-cf6e4c83d7c7\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-x5pdd" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.407683 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-76tfl\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.390506 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c82788b8-bd1f-49c5-8f63-e8e360008f40-config\") pod \"route-controller-manager-6576b87f9c-gnxs8\" (UID: \"c82788b8-bd1f-49c5-8f63-e8e360008f40\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gnxs8" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.432015 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lskhl"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.432044 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mrptv"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.432435 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-2fhp6"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.407463 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cdcaf314-ef4a-4f8e-90fb-a597bcd1f343-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-km29k\" (UID: \"cdcaf314-ef4a-4f8e-90fb-a597bcd1f343\") " pod="openshift-controller-manager/controller-manager-879f6c89f-km29k" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.494969 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-76tfl\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.495393 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/fe3569cb-a99a-4e1a-8b5f-124e68276330-etcd-client\") pod \"apiserver-76f77b778f-t7p55\" (UID: 
\"fe3569cb-a99a-4e1a-8b5f-124e68276330\") " pod="openshift-apiserver/apiserver-76f77b778f-t7p55" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.495664 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/fe3569cb-a99a-4e1a-8b5f-124e68276330-encryption-config\") pod \"apiserver-76f77b778f-t7p55\" (UID: \"fe3569cb-a99a-4e1a-8b5f-124e68276330\") " pod="openshift-apiserver/apiserver-76f77b778f-t7p55" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.496050 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/c212c1e9-bf50-4619-b4a0-7ae0659f7cab-machine-approver-tls\") pod \"machine-approver-56656f9798-5jnbz\" (UID: \"c212c1e9-bf50-4619-b4a0-7ae0659f7cab\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5jnbz" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.496494 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/163752d1-e68c-43c0-b869-2d7755551a1d-metrics-tls\") pod \"dns-operator-744455d44c-6tzl7\" (UID: \"163752d1-e68c-43c0-b869-2d7755551a1d\") " pod="openshift-dns-operator/dns-operator-744455d44c-6tzl7" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.502360 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-76tfl\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.502944 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-p4j4g"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.503248 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fe3569cb-a99a-4e1a-8b5f-124e68276330-trusted-ca-bundle\") pod \"apiserver-76f77b778f-t7p55\" (UID: \"fe3569cb-a99a-4e1a-8b5f-124e68276330\") " pod="openshift-apiserver/apiserver-76f77b778f-t7p55" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.503597 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7886546d-9341-4fcc-a861-d78d9fa35e98-encryption-config\") pod \"apiserver-7bbb656c7d-74x2q\" (UID: \"7886546d-9341-4fcc-a861-d78d9fa35e98\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-74x2q" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.503749 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-gnxs8"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.503828 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vbnmn"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.503914 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fe3569cb-a99a-4e1a-8b5f-124e68276330-serving-cert\") pod \"apiserver-76f77b778f-t7p55\" (UID: \"fe3569cb-a99a-4e1a-8b5f-124e68276330\") " pod="openshift-apiserver/apiserver-76f77b778f-t7p55" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.504145 4576 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7886546d-9341-4fcc-a861-d78d9fa35e98-audit-policies\") pod \"apiserver-7bbb656c7d-74x2q\" (UID: \"7886546d-9341-4fcc-a861-d78d9fa35e98\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-74x2q" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.504542 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412510-jwjq6"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.505133 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5375d73e-a10f-4e11-a6a7-9ec01e8a60ea-service-ca-bundle\") pod \"authentication-operator-69f744f599-hhd5z\" (UID: \"5375d73e-a10f-4e11-a6a7-9ec01e8a60ea\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hhd5z" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.505770 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-74x2q"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.505909 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-km29k"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.505988 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-x5pdd"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.506088 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-9xtqh"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.507634 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-glrgq" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.508036 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-lcjhm" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.508269 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-8rpwp" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.642557 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3cb91673-2622-41a5-91d3-a79e0ba7289b-oauth-serving-cert\") pod \"console-f9d7485db-h7ncw\" (UID: \"3cb91673-2622-41a5-91d3-a79e0ba7289b\") " pod="openshift-console/console-f9d7485db-h7ncw" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.642629 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w2wfz\" (UniqueName: \"kubernetes.io/projected/4ddf4f4a-13ff-4d6d-82e8-4e509e1d293a-kube-api-access-w2wfz\") pod \"cluster-image-registry-operator-dc59b4c8b-8xz88\" (UID: \"4ddf4f4a-13ff-4d6d-82e8-4e509e1d293a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8xz88" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.642658 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3cb91673-2622-41a5-91d3-a79e0ba7289b-console-serving-cert\") pod \"console-f9d7485db-h7ncw\" (UID: \"3cb91673-2622-41a5-91d3-a79e0ba7289b\") " pod="openshift-console/console-f9d7485db-h7ncw" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.642683 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9444d9cb-fe03-4f32-ad40-84901ecfb0d9-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-q56k8\" (UID: \"9444d9cb-fe03-4f32-ad40-84901ecfb0d9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-q56k8" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.642719 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/2e949c9f-48d1-4da5-90a2-21e446f64020-etcd-ca\") pod \"etcd-operator-b45778765-6qg8m\" (UID: \"2e949c9f-48d1-4da5-90a2-21e446f64020\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6qg8m" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.642756 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9444d9cb-fe03-4f32-ad40-84901ecfb0d9-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-q56k8\" (UID: \"9444d9cb-fe03-4f32-ad40-84901ecfb0d9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-q56k8" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.642832 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/711bed39-85e0-418e-b746-659fef7c4b49-trusted-ca\") pod \"ingress-operator-5b745b69d9-jqrv8\" (UID: \"711bed39-85e0-418e-b746-659fef7c4b49\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jqrv8" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.642876 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/2e949c9f-48d1-4da5-90a2-21e446f64020-etcd-client\") pod \"etcd-operator-b45778765-6qg8m\" (UID: \"2e949c9f-48d1-4da5-90a2-21e446f64020\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6qg8m" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.642896 4576 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9444d9cb-fe03-4f32-ad40-84901ecfb0d9-config\") pod \"kube-controller-manager-operator-78b949d7b-q56k8\" (UID: \"9444d9cb-fe03-4f32-ad40-84901ecfb0d9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-q56k8" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.642914 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mzxps\" (UniqueName: \"kubernetes.io/projected/3cb91673-2622-41a5-91d3-a79e0ba7289b-kube-api-access-mzxps\") pod \"console-f9d7485db-h7ncw\" (UID: \"3cb91673-2622-41a5-91d3-a79e0ba7289b\") " pod="openshift-console/console-f9d7485db-h7ncw" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.642947 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4ddf4f4a-13ff-4d6d-82e8-4e509e1d293a-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-8xz88\" (UID: \"4ddf4f4a-13ff-4d6d-82e8-4e509e1d293a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8xz88" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.642975 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/4ddf4f4a-13ff-4d6d-82e8-4e509e1d293a-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-8xz88\" (UID: \"4ddf4f4a-13ff-4d6d-82e8-4e509e1d293a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8xz88" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.642997 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3cb91673-2622-41a5-91d3-a79e0ba7289b-service-ca\") pod \"console-f9d7485db-h7ncw\" (UID: \"3cb91673-2622-41a5-91d3-a79e0ba7289b\") " pod="openshift-console/console-f9d7485db-h7ncw" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.643015 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/711bed39-85e0-418e-b746-659fef7c4b49-metrics-tls\") pod \"ingress-operator-5b745b69d9-jqrv8\" (UID: \"711bed39-85e0-418e-b746-659fef7c4b49\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jqrv8" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.643058 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/711bed39-85e0-418e-b746-659fef7c4b49-bound-sa-token\") pod \"ingress-operator-5b745b69d9-jqrv8\" (UID: \"711bed39-85e0-418e-b746-659fef7c4b49\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jqrv8" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.643077 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2e949c9f-48d1-4da5-90a2-21e446f64020-config\") pod \"etcd-operator-b45778765-6qg8m\" (UID: \"2e949c9f-48d1-4da5-90a2-21e446f64020\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6qg8m" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.643111 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/2e949c9f-48d1-4da5-90a2-21e446f64020-etcd-service-ca\") pod \"etcd-operator-b45778765-6qg8m\" (UID: 
\"2e949c9f-48d1-4da5-90a2-21e446f64020\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6qg8m" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.643132 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3cb91673-2622-41a5-91d3-a79e0ba7289b-console-config\") pod \"console-f9d7485db-h7ncw\" (UID: \"3cb91673-2622-41a5-91d3-a79e0ba7289b\") " pod="openshift-console/console-f9d7485db-h7ncw" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.643174 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/598b95f5-9016-454b-be5b-4b9a4145479b-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-tjv2m\" (UID: \"598b95f5-9016-454b-be5b-4b9a4145479b\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tjv2m" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.643207 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdjm5\" (UniqueName: \"kubernetes.io/projected/711bed39-85e0-418e-b746-659fef7c4b49-kube-api-access-rdjm5\") pod \"ingress-operator-5b745b69d9-jqrv8\" (UID: \"711bed39-85e0-418e-b746-659fef7c4b49\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jqrv8" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.643234 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-phxss\" (UniqueName: \"kubernetes.io/projected/598b95f5-9016-454b-be5b-4b9a4145479b-kube-api-access-phxss\") pod \"openshift-controller-manager-operator-756b6f6bc6-tjv2m\" (UID: \"598b95f5-9016-454b-be5b-4b9a4145479b\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tjv2m" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.643288 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4ddf4f4a-13ff-4d6d-82e8-4e509e1d293a-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-8xz88\" (UID: \"4ddf4f4a-13ff-4d6d-82e8-4e509e1d293a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8xz88" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.643318 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2e949c9f-48d1-4da5-90a2-21e446f64020-serving-cert\") pod \"etcd-operator-b45778765-6qg8m\" (UID: \"2e949c9f-48d1-4da5-90a2-21e446f64020\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6qg8m" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.643332 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3cb91673-2622-41a5-91d3-a79e0ba7289b-trusted-ca-bundle\") pod \"console-f9d7485db-h7ncw\" (UID: \"3cb91673-2622-41a5-91d3-a79e0ba7289b\") " pod="openshift-console/console-f9d7485db-h7ncw" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.643381 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3cb91673-2622-41a5-91d3-a79e0ba7289b-console-oauth-config\") pod \"console-f9d7485db-h7ncw\" (UID: \"3cb91673-2622-41a5-91d3-a79e0ba7289b\") " pod="openshift-console/console-f9d7485db-h7ncw" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.643404 4576 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njs64\" (UniqueName: \"kubernetes.io/projected/2e949c9f-48d1-4da5-90a2-21e446f64020-kube-api-access-njs64\") pod \"etcd-operator-b45778765-6qg8m\" (UID: \"2e949c9f-48d1-4da5-90a2-21e446f64020\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6qg8m" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.643419 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/598b95f5-9016-454b-be5b-4b9a4145479b-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-tjv2m\" (UID: \"598b95f5-9016-454b-be5b-4b9a4145479b\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tjv2m" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.646853 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-l27nx"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.769387 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412510-jwjq6" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.769558 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-76tfl"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.769697 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-9xtqh" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.770011 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-hg98m"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.770079 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-6tzl7"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.770096 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-9nt6z"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.770108 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tjv2m"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.770178 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.770876 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.771137 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.773688 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2fhp6" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.773831 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-p4j4g" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.776343 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vbnmn" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.504578 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.505541 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.611385 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.774587 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.774688 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.775248 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.785898 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-76tfl\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.786067 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-76tfl\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.787350 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/2e949c9f-48d1-4da5-90a2-21e446f64020-etcd-ca\") pod \"etcd-operator-b45778765-6qg8m\" (UID: \"2e949c9f-48d1-4da5-90a2-21e446f64020\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6qg8m" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.789719 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.790234 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-mrptv" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.795366 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3cb91673-2622-41a5-91d3-a79e0ba7289b-console-serving-cert\") pod \"console-f9d7485db-h7ncw\" (UID: \"3cb91673-2622-41a5-91d3-a79e0ba7289b\") " pod="openshift-console/console-f9d7485db-h7ncw" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.804867 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.804985 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.806189 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.806333 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.807931 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.808330 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.813971 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.814213 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.814731 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/2e949c9f-48d1-4da5-90a2-21e446f64020-etcd-client\") pod \"etcd-operator-b45778765-6qg8m\" (UID: \"2e949c9f-48d1-4da5-90a2-21e446f64020\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6qg8m" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.815158 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.817226 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-6qg8m"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.817261 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-nsf2s"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.817275 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2v8gh"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.818753 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3cb91673-2622-41a5-91d3-a79e0ba7289b-console-config\") pod \"console-f9d7485db-h7ncw\" (UID: \"3cb91673-2622-41a5-91d3-a79e0ba7289b\") " pod="openshift-console/console-f9d7485db-h7ncw" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.818901 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3cb91673-2622-41a5-91d3-a79e0ba7289b-oauth-serving-cert\") pod \"console-f9d7485db-h7ncw\" (UID: \"3cb91673-2622-41a5-91d3-a79e0ba7289b\") " pod="openshift-console/console-f9d7485db-h7ncw" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.819244 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/711bed39-85e0-418e-b746-659fef7c4b49-trusted-ca\") pod \"ingress-operator-5b745b69d9-jqrv8\" (UID: \"711bed39-85e0-418e-b746-659fef7c4b49\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jqrv8" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.819304 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-q56k8"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.820387 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2e949c9f-48d1-4da5-90a2-21e446f64020-config\") pod \"etcd-operator-b45778765-6qg8m\" (UID: \"2e949c9f-48d1-4da5-90a2-21e446f64020\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6qg8m" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.820448 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/2e949c9f-48d1-4da5-90a2-21e446f64020-etcd-service-ca\") pod \"etcd-operator-b45778765-6qg8m\" (UID: \"2e949c9f-48d1-4da5-90a2-21e446f64020\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6qg8m" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.821204 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4ddf4f4a-13ff-4d6d-82e8-4e509e1d293a-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-8xz88\" (UID: \"4ddf4f4a-13ff-4d6d-82e8-4e509e1d293a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8xz88" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.821252 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3cb91673-2622-41a5-91d3-a79e0ba7289b-service-ca\") pod \"console-f9d7485db-h7ncw\" (UID: \"3cb91673-2622-41a5-91d3-a79e0ba7289b\") " pod="openshift-console/console-f9d7485db-h7ncw" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.821450 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.821653 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.821764 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.824649 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.826385 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3cb91673-2622-41a5-91d3-a79e0ba7289b-trusted-ca-bundle\") pod \"console-f9d7485db-h7ncw\" (UID: 
\"3cb91673-2622-41a5-91d3-a79e0ba7289b\") " pod="openshift-console/console-f9d7485db-h7ncw" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.828697 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3cb91673-2622-41a5-91d3-a79e0ba7289b-console-oauth-config\") pod \"console-f9d7485db-h7ncw\" (UID: \"3cb91673-2622-41a5-91d3-a79e0ba7289b\") " pod="openshift-console/console-f9d7485db-h7ncw" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.829327 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/598b95f5-9016-454b-be5b-4b9a4145479b-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-tjv2m\" (UID: \"598b95f5-9016-454b-be5b-4b9a4145479b\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tjv2m" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.830611 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-v4pcv"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.830658 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-h7ncw"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.832312 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-brdzq"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.832974 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/598b95f5-9016-454b-be5b-4b9a4145479b-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-tjv2m\" (UID: \"598b95f5-9016-454b-be5b-4b9a4145479b\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tjv2m" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.833186 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-brdzq" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.834368 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2e949c9f-48d1-4da5-90a2-21e446f64020-serving-cert\") pod \"etcd-operator-b45778765-6qg8m\" (UID: \"2e949c9f-48d1-4da5-90a2-21e446f64020\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6qg8m" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.834714 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/4ddf4f4a-13ff-4d6d-82e8-4e509e1d293a-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-8xz88\" (UID: \"4ddf4f4a-13ff-4d6d-82e8-4e509e1d293a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8xz88" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.835336 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.835905 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/711bed39-85e0-418e-b746-659fef7c4b49-metrics-tls\") pod \"ingress-operator-5b745b69d9-jqrv8\" (UID: \"711bed39-85e0-418e-b746-659fef7c4b49\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jqrv8" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.836758 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-28lz2"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.837281 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-28lz2" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.838802 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-sjsrz"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.841935 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9444d9cb-fe03-4f32-ad40-84901ecfb0d9-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-q56k8\" (UID: \"9444d9cb-fe03-4f32-ad40-84901ecfb0d9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-q56k8" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.842080 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mrptv"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.843673 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-lcjhm"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.844943 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9825w"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.846318 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-22vzn"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.847798 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-qv4qp"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.849633 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-p4j4g"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.850908 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-88mqt"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.853684 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8xz88"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.854939 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.857266 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kbnz6"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.858859 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-jqrv8"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.860501 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xbbn2"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.862040 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cwfc8"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.863649 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-2fhp6"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.864936 4576 kubelet.go:2428] 
"SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-28lz2"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.867173 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-8rpwp"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.868894 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-6svzg"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.869570 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-6svzg" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.870297 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vbnmn"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.871845 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-brdzq"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.873571 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412510-jwjq6"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.875090 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.875625 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-9xtqh"] Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.895090 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.898723 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9444d9cb-fe03-4f32-ad40-84901ecfb0d9-config\") pod \"kube-controller-manager-operator-78b949d7b-q56k8\" (UID: \"9444d9cb-fe03-4f32-ad40-84901ecfb0d9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-q56k8" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.915212 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.936492 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.955348 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 03 08:42:24 crc kubenswrapper[4576]: I1203 08:42:24.976213 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.015037 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.036373 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.055162 4576 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.075835 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.095706 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.115592 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.135464 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.155120 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.176318 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.197023 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.216150 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.235906 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.256295 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.275772 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.296814 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.315551 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.336261 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.355947 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.375230 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 03 08:42:25 crc kubenswrapper[4576]: E1203 08:42:25.389514 4576 secret.go:188] Couldn't get secret openshift-machine-api/machine-api-operator-tls: failed to sync secret cache: timed out waiting for the condition Dec 03 08:42:25 crc kubenswrapper[4576]: E1203 08:42:25.389731 4576 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bedc7336-f16c-485b-8cc8-13eea705d68a-machine-api-operator-tls podName:bedc7336-f16c-485b-8cc8-13eea705d68a nodeName:}" failed. No retries permitted until 2025-12-03 08:42:25.889690821 +0000 UTC m=+153.275667845 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "machine-api-operator-tls" (UniqueName: "kubernetes.io/secret/bedc7336-f16c-485b-8cc8-13eea705d68a-machine-api-operator-tls") pod "machine-api-operator-5694c8668f-nsf2s" (UID: "bedc7336-f16c-485b-8cc8-13eea705d68a") : failed to sync secret cache: timed out waiting for the condition Dec 03 08:42:25 crc kubenswrapper[4576]: E1203 08:42:25.390246 4576 configmap.go:193] Couldn't get configMap openshift-machine-api/kube-rbac-proxy: failed to sync configmap cache: timed out waiting for the condition Dec 03 08:42:25 crc kubenswrapper[4576]: E1203 08:42:25.390309 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/bedc7336-f16c-485b-8cc8-13eea705d68a-config podName:bedc7336-f16c-485b-8cc8-13eea705d68a nodeName:}" failed. No retries permitted until 2025-12-03 08:42:25.890292167 +0000 UTC m=+153.276269191 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/bedc7336-f16c-485b-8cc8-13eea705d68a-config") pod "machine-api-operator-5694c8668f-nsf2s" (UID: "bedc7336-f16c-485b-8cc8-13eea705d68a") : failed to sync configmap cache: timed out waiting for the condition Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.393794 4576 request.go:700] Waited for 1.01267243s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-operator-lifecycle-manager/configmaps?fieldSelector=metadata.name%3Dopenshift-service-ca.crt&limit=500&resourceVersion=0 Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.395365 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 03 08:42:25 crc kubenswrapper[4576]: E1203 08:42:25.406690 4576 secret.go:188] Couldn't get secret openshift-apiserver-operator/openshift-apiserver-operator-serving-cert: failed to sync secret cache: timed out waiting for the condition Dec 03 08:42:25 crc kubenswrapper[4576]: E1203 08:42:25.406887 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e82ee5bd-9dd0-4d09-a629-21a1d08dbb06-serving-cert podName:e82ee5bd-9dd0-4d09-a629-21a1d08dbb06 nodeName:}" failed. No retries permitted until 2025-12-03 08:42:25.906843272 +0000 UTC m=+153.292820296 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/e82ee5bd-9dd0-4d09-a629-21a1d08dbb06-serving-cert") pod "openshift-apiserver-operator-796bbdcf4f-lskhl" (UID: "e82ee5bd-9dd0-4d09-a629-21a1d08dbb06") : failed to sync secret cache: timed out waiting for the condition Dec 03 08:42:25 crc kubenswrapper[4576]: E1203 08:42:25.407814 4576 configmap.go:193] Couldn't get configMap openshift-machine-api/machine-api-operator-images: failed to sync configmap cache: timed out waiting for the condition Dec 03 08:42:25 crc kubenswrapper[4576]: E1203 08:42:25.408053 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/bedc7336-f16c-485b-8cc8-13eea705d68a-images podName:bedc7336-f16c-485b-8cc8-13eea705d68a nodeName:}" failed. 
No retries permitted until 2025-12-03 08:42:25.908021323 +0000 UTC m=+153.293998347 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "images" (UniqueName: "kubernetes.io/configmap/bedc7336-f16c-485b-8cc8-13eea705d68a-images") pod "machine-api-operator-5694c8668f-nsf2s" (UID: "bedc7336-f16c-485b-8cc8-13eea705d68a") : failed to sync configmap cache: timed out waiting for the condition Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.415431 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.453508 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9dtfm\" (UniqueName: \"kubernetes.io/projected/cdcaf314-ef4a-4f8e-90fb-a597bcd1f343-kube-api-access-9dtfm\") pod \"controller-manager-879f6c89f-km29k\" (UID: \"cdcaf314-ef4a-4f8e-90fb-a597bcd1f343\") " pod="openshift-controller-manager/controller-manager-879f6c89f-km29k" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.472719 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ntqmc\" (UniqueName: \"kubernetes.io/projected/c57d4112-6506-4f4c-86c2-c6d1249df640-kube-api-access-ntqmc\") pod \"cluster-samples-operator-665b6dd947-22vzn\" (UID: \"c57d4112-6506-4f4c-86c2-c6d1249df640\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-22vzn" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.485830 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-km29k" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.497900 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sjgq6\" (UniqueName: \"kubernetes.io/projected/c82788b8-bd1f-49c5-8f63-e8e360008f40-kube-api-access-sjgq6\") pod \"route-controller-manager-6576b87f9c-gnxs8\" (UID: \"c82788b8-bd1f-49c5-8f63-e8e360008f40\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gnxs8" Dec 03 08:42:25 crc kubenswrapper[4576]: E1203 08:42:25.505735 4576 configmap.go:193] Couldn't get configMap openshift-apiserver-operator/openshift-apiserver-operator-config: failed to sync configmap cache: timed out waiting for the condition Dec 03 08:42:25 crc kubenswrapper[4576]: E1203 08:42:25.505882 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e82ee5bd-9dd0-4d09-a629-21a1d08dbb06-config podName:e82ee5bd-9dd0-4d09-a629-21a1d08dbb06 nodeName:}" failed. No retries permitted until 2025-12-03 08:42:26.005840891 +0000 UTC m=+153.391817915 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/e82ee5bd-9dd0-4d09-a629-21a1d08dbb06-config") pod "openshift-apiserver-operator-796bbdcf4f-lskhl" (UID: "e82ee5bd-9dd0-4d09-a629-21a1d08dbb06") : failed to sync configmap cache: timed out waiting for the condition Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.516823 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xsgn7\" (UniqueName: \"kubernetes.io/projected/5375d73e-a10f-4e11-a6a7-9ec01e8a60ea-kube-api-access-xsgn7\") pod \"authentication-operator-69f744f599-hhd5z\" (UID: \"5375d73e-a10f-4e11-a6a7-9ec01e8a60ea\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hhd5z" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.530013 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j95dr\" (UniqueName: \"kubernetes.io/projected/7886546d-9341-4fcc-a861-d78d9fa35e98-kube-api-access-j95dr\") pod \"apiserver-7bbb656c7d-74x2q\" (UID: \"7886546d-9341-4fcc-a861-d78d9fa35e98\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-74x2q" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.551826 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p28dj\" (UniqueName: \"kubernetes.io/projected/a2991975-3b10-4f7d-aa48-750e3c402870-kube-api-access-p28dj\") pod \"oauth-openshift-558db77b4-76tfl\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.591924 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.609453 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fpnwv\" (UniqueName: \"kubernetes.io/projected/f9d3808c-11ef-421d-83e6-b909679c5490-kube-api-access-fpnwv\") pod \"downloads-7954f5f757-l27nx\" (UID: \"f9d3808c-11ef-421d-83e6-b909679c5490\") " pod="openshift-console/downloads-7954f5f757-l27nx" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.630790 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6swkq\" (UniqueName: \"kubernetes.io/projected/fe3569cb-a99a-4e1a-8b5f-124e68276330-kube-api-access-6swkq\") pod \"apiserver-76f77b778f-t7p55\" (UID: \"fe3569cb-a99a-4e1a-8b5f-124e68276330\") " pod="openshift-apiserver/apiserver-76f77b778f-t7p55" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.692652 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-22vzn" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.693391 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.705609 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-t7p55" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.705716 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-28dzq\" (UniqueName: \"kubernetes.io/projected/098beae0-8e26-45b7-bf18-cf6e4c83d7c7-kube-api-access-28dzq\") pod \"openshift-config-operator-7777fb866f-x5pdd\" (UID: \"098beae0-8e26-45b7-bf18-cf6e4c83d7c7\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-x5pdd" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.727555 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.728458 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.731148 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-klbff\" (UniqueName: \"kubernetes.io/projected/163752d1-e68c-43c0-b869-2d7755551a1d-kube-api-access-klbff\") pod \"dns-operator-744455d44c-6tzl7\" (UID: \"163752d1-e68c-43c0-b869-2d7755551a1d\") " pod="openshift-dns-operator/dns-operator-744455d44c-6tzl7" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.734802 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gnxs8" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.737428 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.751234 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6522b\" (UniqueName: \"kubernetes.io/projected/c212c1e9-bf50-4619-b4a0-7ae0659f7cab-kube-api-access-6522b\") pod \"machine-approver-56656f9798-5jnbz\" (UID: \"c212c1e9-bf50-4619-b4a0-7ae0659f7cab\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5jnbz" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.812782 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w2wfz\" (UniqueName: \"kubernetes.io/projected/4ddf4f4a-13ff-4d6d-82e8-4e509e1d293a-kube-api-access-w2wfz\") pod \"cluster-image-registry-operator-dc59b4c8b-8xz88\" (UID: \"4ddf4f4a-13ff-4d6d-82e8-4e509e1d293a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8xz88" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.812935 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-hhd5z" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.813112 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-74x2q" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.821062 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.821229 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.821564 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.866487 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-x5pdd" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.913808 4576 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.913922 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-6tzl7" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.913971 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.913817 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-l27nx" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.915031 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.915973 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.916774 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.942601 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.973987 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e82ee5bd-9dd0-4d09-a629-21a1d08dbb06-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-lskhl\" (UID: \"e82ee5bd-9dd0-4d09-a629-21a1d08dbb06\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lskhl" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.974047 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/bedc7336-f16c-485b-8cc8-13eea705d68a-images\") pod \"machine-api-operator-5694c8668f-nsf2s\" (UID: \"bedc7336-f16c-485b-8cc8-13eea705d68a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nsf2s" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.974112 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bedc7336-f16c-485b-8cc8-13eea705d68a-config\") pod \"machine-api-operator-5694c8668f-nsf2s\" (UID: 
\"bedc7336-f16c-485b-8cc8-13eea705d68a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nsf2s" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.974141 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/bedc7336-f16c-485b-8cc8-13eea705d68a-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-nsf2s\" (UID: \"bedc7336-f16c-485b-8cc8-13eea705d68a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nsf2s" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.986680 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 03 08:42:25 crc kubenswrapper[4576]: I1203 08:42:25.986938 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.110784 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e82ee5bd-9dd0-4d09-a629-21a1d08dbb06-config\") pod \"openshift-apiserver-operator-796bbdcf4f-lskhl\" (UID: \"e82ee5bd-9dd0-4d09-a629-21a1d08dbb06\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lskhl" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.111795 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5jnbz" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.132726 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.132942 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.133044 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.133147 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.136201 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.139226 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9444d9cb-fe03-4f32-ad40-84901ecfb0d9-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-q56k8\" (UID: \"9444d9cb-fe03-4f32-ad40-84901ecfb0d9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-q56k8" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.160740 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.161048 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mzxps\" (UniqueName: \"kubernetes.io/projected/3cb91673-2622-41a5-91d3-a79e0ba7289b-kube-api-access-mzxps\") pod \"console-f9d7485db-h7ncw\" (UID: \"3cb91673-2622-41a5-91d3-a79e0ba7289b\") " pod="openshift-console/console-f9d7485db-h7ncw" Dec 03 08:42:26 
crc kubenswrapper[4576]: I1203 08:42:26.177823 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.199039 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.215074 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.237068 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.238307 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-km29k"] Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.262462 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-76tfl"] Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.265408 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 03 08:42:26 crc kubenswrapper[4576]: W1203 08:42:26.279855 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcdcaf314_ef4a_4f8e_90fb_a597bcd1f343.slice/crio-68a752e2102b8abfbce82650c6ed176893712461426027f30fade658a25e5a16 WatchSource:0}: Error finding container 68a752e2102b8abfbce82650c6ed176893712461426027f30fade658a25e5a16: Status 404 returned error can't find the container with id 68a752e2102b8abfbce82650c6ed176893712461426027f30fade658a25e5a16 Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.289946 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" event={"ID":"a2991975-3b10-4f7d-aa48-750e3c402870","Type":"ContainerStarted","Data":"cdf8e93fe377693d8b35ed6fd1cd1fde17be6b97dc22655cafe243cf04a8fa81"} Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.292592 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5jnbz" event={"ID":"c212c1e9-bf50-4619-b4a0-7ae0659f7cab","Type":"ContainerStarted","Data":"926749956b751296719ecc4f3aed81d70c4d5a87b580a513a6d2d247061870dd"} Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.335171 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.335672 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.340778 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.342060 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.355218 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.436962 4576 request.go:700] Waited for 1.617299811s due to 
client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-ingress/configmaps?fieldSelector=metadata.name%3Dservice-ca-bundle&limit=500&resourceVersion=0 Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.436974 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-h7ncw" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.438245 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-q56k8" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.452399 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.452993 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.453216 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.471593 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4ddf4f4a-13ff-4d6d-82e8-4e509e1d293a-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-8xz88\" (UID: \"4ddf4f4a-13ff-4d6d-82e8-4e509e1d293a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8xz88" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.477507 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.485272 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdjm5\" (UniqueName: \"kubernetes.io/projected/711bed39-85e0-418e-b746-659fef7c4b49-kube-api-access-rdjm5\") pod \"ingress-operator-5b745b69d9-jqrv8\" (UID: \"711bed39-85e0-418e-b746-659fef7c4b49\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jqrv8" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.524081 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-phxss\" (UniqueName: \"kubernetes.io/projected/598b95f5-9016-454b-be5b-4b9a4145479b-kube-api-access-phxss\") pod \"openshift-controller-manager-operator-756b6f6bc6-tjv2m\" (UID: \"598b95f5-9016-454b-be5b-4b9a4145479b\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tjv2m" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.532751 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/711bed39-85e0-418e-b746-659fef7c4b49-bound-sa-token\") pod \"ingress-operator-5b745b69d9-jqrv8\" (UID: \"711bed39-85e0-418e-b746-659fef7c4b49\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jqrv8" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.556098 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-njs64\" (UniqueName: \"kubernetes.io/projected/2e949c9f-48d1-4da5-90a2-21e446f64020-kube-api-access-njs64\") pod \"etcd-operator-b45778765-6qg8m\" (UID: \"2e949c9f-48d1-4da5-90a2-21e446f64020\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6qg8m" Dec 03 08:42:26 crc 
kubenswrapper[4576]: I1203 08:42:26.558873 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 03 08:42:26 crc kubenswrapper[4576]: E1203 08:42:26.566574 4576 projected.go:288] Couldn't get configMap openshift-machine-api/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.574981 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 03 08:42:26 crc kubenswrapper[4576]: E1203 08:42:26.622126 4576 projected.go:288] Couldn't get configMap openshift-apiserver-operator/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.623724 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.624098 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.625649 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-6qg8m" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.636420 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tjv2m" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.636847 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.657946 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.670851 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8xz88" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.682173 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.692411 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jqrv8" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.696209 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.715697 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.840090 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.840722 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.840862 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.840970 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.840974 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 03 08:42:26 crc kubenswrapper[4576]: E1203 08:42:26.842819 4576 projected.go:194] Error preparing data for projected volume kube-api-access-fp52h for pod openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lskhl: failed to sync configmap cache: timed out waiting for the condition Dec 03 08:42:26 crc kubenswrapper[4576]: E1203 08:42:26.842878 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e82ee5bd-9dd0-4d09-a629-21a1d08dbb06-kube-api-access-fp52h podName:e82ee5bd-9dd0-4d09-a629-21a1d08dbb06 nodeName:}" failed. No retries permitted until 2025-12-03 08:42:27.342860193 +0000 UTC m=+154.728837177 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-fp52h" (UniqueName: "kubernetes.io/projected/e82ee5bd-9dd0-4d09-a629-21a1d08dbb06-kube-api-access-fp52h") pod "openshift-apiserver-operator-796bbdcf4f-lskhl" (UID: "e82ee5bd-9dd0-4d09-a629-21a1d08dbb06") : failed to sync configmap cache: timed out waiting for the condition Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.846062 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/64658394-9b30-424c-b243-e997781796e1-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-cwfc8\" (UID: \"64658394-9b30-424c-b243-e997781796e1\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cwfc8" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.846116 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5191b1fc-36b6-4fae-909b-3d721b39dd49-config\") pod \"console-operator-58897d9998-qv4qp\" (UID: \"5191b1fc-36b6-4fae-909b-3d721b39dd49\") " pod="openshift-console-operator/console-operator-58897d9998-qv4qp" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.846143 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/99a12dc3-7eee-4751-9223-8f8d8098d045-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-2v8gh\" (UID: \"99a12dc3-7eee-4751-9223-8f8d8098d045\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2v8gh" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.846158 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5191b1fc-36b6-4fae-909b-3d721b39dd49-trusted-ca\") pod \"console-operator-58897d9998-qv4qp\" (UID: \"5191b1fc-36b6-4fae-909b-3d721b39dd49\") " pod="openshift-console-operator/console-operator-58897d9998-qv4qp" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.846182 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/974495e5-ca1d-4c68-9184-244d7c08276b-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-9nt6z\" (UID: \"974495e5-ca1d-4c68-9184-244d7c08276b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-9nt6z" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.846512 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99a12dc3-7eee-4751-9223-8f8d8098d045-config\") pod \"kube-apiserver-operator-766d6c64bb-2v8gh\" (UID: \"99a12dc3-7eee-4751-9223-8f8d8098d045\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2v8gh" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.847973 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d9c7dbff-42f0-43fe-bf81-e539cb523a77-bound-sa-token\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.848005 4576 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64658394-9b30-424c-b243-e997781796e1-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-cwfc8\" (UID: \"64658394-9b30-424c-b243-e997781796e1\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cwfc8" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.848038 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.848070 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5191b1fc-36b6-4fae-909b-3d721b39dd49-serving-cert\") pod \"console-operator-58897d9998-qv4qp\" (UID: \"5191b1fc-36b6-4fae-909b-3d721b39dd49\") " pod="openshift-console-operator/console-operator-58897d9998-qv4qp" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.848108 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d9c7dbff-42f0-43fe-bf81-e539cb523a77-trusted-ca\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.848127 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nhzmk\" (UniqueName: \"kubernetes.io/projected/5191b1fc-36b6-4fae-909b-3d721b39dd49-kube-api-access-nhzmk\") pod \"console-operator-58897d9998-qv4qp\" (UID: \"5191b1fc-36b6-4fae-909b-3d721b39dd49\") " pod="openshift-console-operator/console-operator-58897d9998-qv4qp" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.848189 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/d9c7dbff-42f0-43fe-bf81-e539cb523a77-ca-trust-extracted\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.848204 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f4scw\" (UniqueName: \"kubernetes.io/projected/d9c7dbff-42f0-43fe-bf81-e539cb523a77-kube-api-access-f4scw\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.848223 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/d9c7dbff-42f0-43fe-bf81-e539cb523a77-registry-certificates\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.848251 4576 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/d9c7dbff-42f0-43fe-bf81-e539cb523a77-registry-tls\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.848269 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/974495e5-ca1d-4c68-9184-244d7c08276b-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-9nt6z\" (UID: \"974495e5-ca1d-4c68-9184-244d7c08276b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-9nt6z" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.848286 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vcfvq\" (UniqueName: \"kubernetes.io/projected/974495e5-ca1d-4c68-9184-244d7c08276b-kube-api-access-vcfvq\") pod \"kube-storage-version-migrator-operator-b67b599dd-9nt6z\" (UID: \"974495e5-ca1d-4c68-9184-244d7c08276b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-9nt6z" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.848317 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/d9c7dbff-42f0-43fe-bf81-e539cb523a77-installation-pull-secrets\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.848334 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/99a12dc3-7eee-4751-9223-8f8d8098d045-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-2v8gh\" (UID: \"99a12dc3-7eee-4751-9223-8f8d8098d045\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2v8gh" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.848412 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/64658394-9b30-424c-b243-e997781796e1-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-cwfc8\" (UID: \"64658394-9b30-424c-b243-e997781796e1\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cwfc8" Dec 03 08:42:26 crc kubenswrapper[4576]: E1203 08:42:26.850017 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:27.349999765 +0000 UTC m=+154.735976749 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.850362 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/bedc7336-f16c-485b-8cc8-13eea705d68a-images\") pod \"machine-api-operator-5694c8668f-nsf2s\" (UID: \"bedc7336-f16c-485b-8cc8-13eea705d68a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nsf2s" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.851853 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-74x2q"] Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.856743 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/bedc7336-f16c-485b-8cc8-13eea705d68a-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-nsf2s\" (UID: \"bedc7336-f16c-485b-8cc8-13eea705d68a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nsf2s" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.859425 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 03 08:42:26 crc kubenswrapper[4576]: W1203 08:42:26.865036 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7886546d_9341_4fcc_a861_d78d9fa35e98.slice/crio-8ff554c23b1ba4b69e5250909c215f5ab8f4811c4ff80f154ea6a0f144dd7538 WatchSource:0}: Error finding container 8ff554c23b1ba4b69e5250909c215f5ab8f4811c4ff80f154ea6a0f144dd7538: Status 404 returned error can't find the container with id 8ff554c23b1ba4b69e5250909c215f5ab8f4811c4ff80f154ea6a0f144dd7538 Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.875563 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e82ee5bd-9dd0-4d09-a629-21a1d08dbb06-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-lskhl\" (UID: \"e82ee5bd-9dd0-4d09-a629-21a1d08dbb06\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lskhl" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.890799 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.893813 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-x5pdd"] Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.893845 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-t7p55"] Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.894878 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-gnxs8"] Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.902668 4576 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 03 08:42:26 crc kubenswrapper[4576]: E1203 08:42:26.907518 4576 projected.go:194] Error preparing data for projected volume kube-api-access-f5rj2 for pod openshift-machine-api/machine-api-operator-5694c8668f-nsf2s: failed to sync configmap cache: timed out waiting for the condition Dec 03 08:42:26 crc kubenswrapper[4576]: E1203 08:42:26.907876 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/bedc7336-f16c-485b-8cc8-13eea705d68a-kube-api-access-f5rj2 podName:bedc7336-f16c-485b-8cc8-13eea705d68a nodeName:}" failed. No retries permitted until 2025-12-03 08:42:27.407849689 +0000 UTC m=+154.793826673 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-f5rj2" (UniqueName: "kubernetes.io/projected/bedc7336-f16c-485b-8cc8-13eea705d68a-kube-api-access-f5rj2") pod "machine-api-operator-5694c8668f-nsf2s" (UID: "bedc7336-f16c-485b-8cc8-13eea705d68a") : failed to sync configmap cache: timed out waiting for the condition Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.917724 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.931017 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-22vzn"] Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.938001 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.939304 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-6tzl7"] Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.942745 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e82ee5bd-9dd0-4d09-a629-21a1d08dbb06-config\") pod \"openshift-apiserver-operator-796bbdcf4f-lskhl\" (UID: \"e82ee5bd-9dd0-4d09-a629-21a1d08dbb06\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lskhl" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.948017 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-hhd5z"] Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.949858 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.950034 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/974495e5-ca1d-4c68-9184-244d7c08276b-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-9nt6z\" (UID: \"974495e5-ca1d-4c68-9184-244d7c08276b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-9nt6z" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.950073 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: 
\"kubernetes.io/secret/74ee01ca-ad15-4a8f-8c72-0daf093db217-metrics-certs\") pod \"router-default-5444994796-glrgq\" (UID: \"74ee01ca-ad15-4a8f-8c72-0daf093db217\") " pod="openshift-ingress/router-default-5444994796-glrgq" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.950127 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/7b548daf-86aa-4fb1-86a8-3064794876c1-profile-collector-cert\") pod \"catalog-operator-68c6474976-lcjhm\" (UID: \"7b548daf-86aa-4fb1-86a8-3064794876c1\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-lcjhm" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.950155 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/d9c7dbff-42f0-43fe-bf81-e539cb523a77-installation-pull-secrets\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.950191 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/64658394-9b30-424c-b243-e997781796e1-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-cwfc8\" (UID: \"64658394-9b30-424c-b243-e997781796e1\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cwfc8" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.950215 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8zqxg\" (UniqueName: \"kubernetes.io/projected/2cb8c604-b141-47b0-af3e-edf6ac4cd04c-kube-api-access-8zqxg\") pod \"service-ca-operator-777779d784-p4j4g\" (UID: \"2cb8c604-b141-47b0-af3e-edf6ac4cd04c\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-p4j4g" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.950240 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-shx27\" (UniqueName: \"kubernetes.io/projected/b09df583-33ba-444d-aa5e-62158b7a3f80-kube-api-access-shx27\") pod \"machine-config-controller-84d6567774-v4pcv\" (UID: \"b09df583-33ba-444d-aa5e-62158b7a3f80\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-v4pcv" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.950274 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/64658394-9b30-424c-b243-e997781796e1-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-cwfc8\" (UID: \"64658394-9b30-424c-b243-e997781796e1\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cwfc8" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.950305 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/fa35292d-7cdf-4bb9-8fb0-82296792a332-certs\") pod \"machine-config-server-6svzg\" (UID: \"fa35292d-7cdf-4bb9-8fb0-82296792a332\") " pod="openshift-machine-config-operator/machine-config-server-6svzg" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.950326 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: 
\"kubernetes.io/host-path/825049cb-3413-4d9b-81bb-7210dc51a05f-socket-dir\") pod \"csi-hostpathplugin-9xtqh\" (UID: \"825049cb-3413-4d9b-81bb-7210dc51a05f\") " pod="hostpath-provisioner/csi-hostpathplugin-9xtqh" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.950363 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/70305d8f-8bdf-437b-ba4d-aaadc408205d-secret-volume\") pod \"collect-profiles-29412510-jwjq6\" (UID: \"70305d8f-8bdf-437b-ba4d-aaadc408205d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412510-jwjq6" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.950381 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b3bdb0c8-1a8b-49fe-af23-0704abbeb3c2-apiservice-cert\") pod \"packageserver-d55dfcdfc-9825w\" (UID: \"b3bdb0c8-1a8b-49fe-af23-0704abbeb3c2\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9825w" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.950397 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/f85719c9-01af-498b-ba00-7a43b7b923e3-signing-cabundle\") pod \"service-ca-9c57cc56f-sjsrz\" (UID: \"f85719c9-01af-498b-ba00-7a43b7b923e3\") " pod="openshift-service-ca/service-ca-9c57cc56f-sjsrz" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.950412 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qzvcj\" (UniqueName: \"kubernetes.io/projected/b3bdb0c8-1a8b-49fe-af23-0704abbeb3c2-kube-api-access-qzvcj\") pod \"packageserver-d55dfcdfc-9825w\" (UID: \"b3bdb0c8-1a8b-49fe-af23-0704abbeb3c2\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9825w" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.950427 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b09df583-33ba-444d-aa5e-62158b7a3f80-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-v4pcv\" (UID: \"b09df583-33ba-444d-aa5e-62158b7a3f80\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-v4pcv" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.950452 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5191b1fc-36b6-4fae-909b-3d721b39dd49-config\") pod \"console-operator-58897d9998-qv4qp\" (UID: \"5191b1fc-36b6-4fae-909b-3d721b39dd49\") " pod="openshift-console-operator/console-operator-58897d9998-qv4qp" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.950473 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/825049cb-3413-4d9b-81bb-7210dc51a05f-plugins-dir\") pod \"csi-hostpathplugin-9xtqh\" (UID: \"825049cb-3413-4d9b-81bb-7210dc51a05f\") " pod="hostpath-provisioner/csi-hostpathplugin-9xtqh" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.950546 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/154a9dc5-f312-436b-958f-c78ac4ec740c-webhook-certs\") pod 
\"multus-admission-controller-857f4d67dd-8rpwp\" (UID: \"154a9dc5-f312-436b-958f-c78ac4ec740c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-8rpwp" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.950585 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/99a12dc3-7eee-4751-9223-8f8d8098d045-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-2v8gh\" (UID: \"99a12dc3-7eee-4751-9223-8f8d8098d045\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2v8gh" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.950606 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b09df583-33ba-444d-aa5e-62158b7a3f80-proxy-tls\") pod \"machine-config-controller-84d6567774-v4pcv\" (UID: \"b09df583-33ba-444d-aa5e-62158b7a3f80\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-v4pcv" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.950627 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/2541618c-f550-48ef-9316-77a5dd7f1084-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-mrptv\" (UID: \"2541618c-f550-48ef-9316-77a5dd7f1084\") " pod="openshift-marketplace/marketplace-operator-79b997595-mrptv" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.950653 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/b3bdb0c8-1a8b-49fe-af23-0704abbeb3c2-tmpfs\") pod \"packageserver-d55dfcdfc-9825w\" (UID: \"b3bdb0c8-1a8b-49fe-af23-0704abbeb3c2\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9825w" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.950700 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/974495e5-ca1d-4c68-9184-244d7c08276b-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-9nt6z\" (UID: \"974495e5-ca1d-4c68-9184-244d7c08276b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-9nt6z" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.950721 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/84029005-e8f5-4697-9fe2-4057607adb5e-proxy-tls\") pod \"machine-config-operator-74547568cd-2fhp6\" (UID: \"84029005-e8f5-4697-9fe2-4057607adb5e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2fhp6" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.950742 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/19be1a82-6279-466c-a719-e346d59597be-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-kbnz6\" (UID: \"19be1a82-6279-466c-a719-e346d59597be\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kbnz6" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.950760 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwrnq\" 
(UniqueName: \"kubernetes.io/projected/19be1a82-6279-466c-a719-e346d59597be-kube-api-access-xwrnq\") pod \"control-plane-machine-set-operator-78cbb6b69f-kbnz6\" (UID: \"19be1a82-6279-466c-a719-e346d59597be\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kbnz6" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.950794 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/825049cb-3413-4d9b-81bb-7210dc51a05f-mountpoint-dir\") pod \"csi-hostpathplugin-9xtqh\" (UID: \"825049cb-3413-4d9b-81bb-7210dc51a05f\") " pod="hostpath-provisioner/csi-hostpathplugin-9xtqh" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.950810 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/74ee01ca-ad15-4a8f-8c72-0daf093db217-service-ca-bundle\") pod \"router-default-5444994796-glrgq\" (UID: \"74ee01ca-ad15-4a8f-8c72-0daf093db217\") " pod="openshift-ingress/router-default-5444994796-glrgq" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.950828 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t7c6l\" (UniqueName: \"kubernetes.io/projected/dfad5e53-c42f-4e6f-8c41-2d1b7b668e29-kube-api-access-t7c6l\") pod \"olm-operator-6b444d44fb-vbnmn\" (UID: \"dfad5e53-c42f-4e6f-8c41-2d1b7b668e29\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vbnmn" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.950866 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f5ebf9cc-8827-431f-9a1b-f6c755a4a460-cert\") pod \"ingress-canary-28lz2\" (UID: \"f5ebf9cc-8827-431f-9a1b-f6c755a4a460\") " pod="openshift-ingress-canary/ingress-canary-28lz2" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.950882 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2541618c-f550-48ef-9316-77a5dd7f1084-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-mrptv\" (UID: \"2541618c-f550-48ef-9316-77a5dd7f1084\") " pod="openshift-marketplace/marketplace-operator-79b997595-mrptv" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.950898 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/dfad5e53-c42f-4e6f-8c41-2d1b7b668e29-srv-cert\") pod \"olm-operator-6b444d44fb-vbnmn\" (UID: \"dfad5e53-c42f-4e6f-8c41-2d1b7b668e29\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vbnmn" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.950915 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v77b9\" (UniqueName: \"kubernetes.io/projected/84029005-e8f5-4697-9fe2-4057607adb5e-kube-api-access-v77b9\") pod \"machine-config-operator-74547568cd-2fhp6\" (UID: \"84029005-e8f5-4697-9fe2-4057607adb5e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2fhp6" Dec 03 08:42:26 crc kubenswrapper[4576]: E1203 08:42:26.951060 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:27.450998918 +0000 UTC m=+154.836975902 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.951164 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d9c7dbff-42f0-43fe-bf81-e539cb523a77-trusted-ca\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.951241 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nhzmk\" (UniqueName: \"kubernetes.io/projected/5191b1fc-36b6-4fae-909b-3d721b39dd49-kube-api-access-nhzmk\") pod \"console-operator-58897d9998-qv4qp\" (UID: \"5191b1fc-36b6-4fae-909b-3d721b39dd49\") " pod="openshift-console-operator/console-operator-58897d9998-qv4qp" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.951316 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mk565\" (UniqueName: \"kubernetes.io/projected/f5ebf9cc-8827-431f-9a1b-f6c755a4a460-kube-api-access-mk565\") pod \"ingress-canary-28lz2\" (UID: \"f5ebf9cc-8827-431f-9a1b-f6c755a4a460\") " pod="openshift-ingress-canary/ingress-canary-28lz2" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.951345 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b3bdb0c8-1a8b-49fe-af23-0704abbeb3c2-webhook-cert\") pod \"packageserver-d55dfcdfc-9825w\" (UID: \"b3bdb0c8-1a8b-49fe-af23-0704abbeb3c2\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9825w" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.951462 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/d9c7dbff-42f0-43fe-bf81-e539cb523a77-ca-trust-extracted\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.951491 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m52j5\" (UniqueName: \"kubernetes.io/projected/dc0c496c-fa01-4d20-8bb1-0e0a5f0dda44-kube-api-access-m52j5\") pod \"dns-default-brdzq\" (UID: \"dc0c496c-fa01-4d20-8bb1-0e0a5f0dda44\") " pod="openshift-dns/dns-default-brdzq" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.951756 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/d9c7dbff-42f0-43fe-bf81-e539cb523a77-registry-certificates\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.951800 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/7b548daf-86aa-4fb1-86a8-3064794876c1-srv-cert\") pod \"catalog-operator-68c6474976-lcjhm\" (UID: \"7b548daf-86aa-4fb1-86a8-3064794876c1\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-lcjhm" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.951860 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/d9c7dbff-42f0-43fe-bf81-e539cb523a77-registry-tls\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.951899 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-psq99\" (UniqueName: \"kubernetes.io/projected/825049cb-3413-4d9b-81bb-7210dc51a05f-kube-api-access-psq99\") pod \"csi-hostpathplugin-9xtqh\" (UID: \"825049cb-3413-4d9b-81bb-7210dc51a05f\") " pod="hostpath-provisioner/csi-hostpathplugin-9xtqh" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.951942 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vcfvq\" (UniqueName: \"kubernetes.io/projected/974495e5-ca1d-4c68-9184-244d7c08276b-kube-api-access-vcfvq\") pod \"kube-storage-version-migrator-operator-b67b599dd-9nt6z\" (UID: \"974495e5-ca1d-4c68-9184-244d7c08276b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-9nt6z" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.951965 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fn5w9\" (UniqueName: \"kubernetes.io/projected/7b548daf-86aa-4fb1-86a8-3064794876c1-kube-api-access-fn5w9\") pod \"catalog-operator-68c6474976-lcjhm\" (UID: \"7b548daf-86aa-4fb1-86a8-3064794876c1\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-lcjhm" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.952007 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/74ee01ca-ad15-4a8f-8c72-0daf093db217-default-certificate\") pod \"router-default-5444994796-glrgq\" (UID: \"74ee01ca-ad15-4a8f-8c72-0daf093db217\") " pod="openshift-ingress/router-default-5444994796-glrgq" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.952032 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/99a12dc3-7eee-4751-9223-8f8d8098d045-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-2v8gh\" (UID: \"99a12dc3-7eee-4751-9223-8f8d8098d045\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2v8gh" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.952056 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/84029005-e8f5-4697-9fe2-4057607adb5e-auth-proxy-config\") pod \"machine-config-operator-74547568cd-2fhp6\" (UID: \"84029005-e8f5-4697-9fe2-4057607adb5e\") " 
pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2fhp6" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.952103 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2zcfq\" (UniqueName: \"kubernetes.io/projected/fa35292d-7cdf-4bb9-8fb0-82296792a332-kube-api-access-2zcfq\") pod \"machine-config-server-6svzg\" (UID: \"fa35292d-7cdf-4bb9-8fb0-82296792a332\") " pod="openshift-machine-config-operator/machine-config-server-6svzg" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.952130 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2cb8c604-b141-47b0-af3e-edf6ac4cd04c-serving-cert\") pod \"service-ca-operator-777779d784-p4j4g\" (UID: \"2cb8c604-b141-47b0-af3e-edf6ac4cd04c\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-p4j4g" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.952170 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/825049cb-3413-4d9b-81bb-7210dc51a05f-registration-dir\") pod \"csi-hostpathplugin-9xtqh\" (UID: \"825049cb-3413-4d9b-81bb-7210dc51a05f\") " pod="hostpath-provisioner/csi-hostpathplugin-9xtqh" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.952327 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nxwpt\" (UniqueName: \"kubernetes.io/projected/154a9dc5-f312-436b-958f-c78ac4ec740c-kube-api-access-nxwpt\") pod \"multus-admission-controller-857f4d67dd-8rpwp\" (UID: \"154a9dc5-f312-436b-958f-c78ac4ec740c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-8rpwp" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.952353 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/825049cb-3413-4d9b-81bb-7210dc51a05f-csi-data-dir\") pod \"csi-hostpathplugin-9xtqh\" (UID: \"825049cb-3413-4d9b-81bb-7210dc51a05f\") " pod="hostpath-provisioner/csi-hostpathplugin-9xtqh" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.952376 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/84029005-e8f5-4697-9fe2-4057607adb5e-images\") pod \"machine-config-operator-74547568cd-2fhp6\" (UID: \"84029005-e8f5-4697-9fe2-4057607adb5e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2fhp6" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.952438 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/fa35292d-7cdf-4bb9-8fb0-82296792a332-node-bootstrap-token\") pod \"machine-config-server-6svzg\" (UID: \"fa35292d-7cdf-4bb9-8fb0-82296792a332\") " pod="openshift-machine-config-operator/machine-config-server-6svzg" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.952493 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/dc0c496c-fa01-4d20-8bb1-0e0a5f0dda44-metrics-tls\") pod \"dns-default-brdzq\" (UID: \"dc0c496c-fa01-4d20-8bb1-0e0a5f0dda44\") " pod="openshift-dns/dns-default-brdzq" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 
08:42:26.952610 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/0d41bec2-af11-4659-9864-a8f5684af5da-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-xbbn2\" (UID: \"0d41bec2-af11-4659-9864-a8f5684af5da\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xbbn2" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.954666 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/974495e5-ca1d-4c68-9184-244d7c08276b-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-9nt6z\" (UID: \"974495e5-ca1d-4c68-9184-244d7c08276b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-9nt6z" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.955338 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/974495e5-ca1d-4c68-9184-244d7c08276b-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-9nt6z\" (UID: \"974495e5-ca1d-4c68-9184-244d7c08276b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-9nt6z" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.957048 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.957361 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5191b1fc-36b6-4fae-909b-3d721b39dd49-trusted-ca\") pod \"console-operator-58897d9998-qv4qp\" (UID: \"5191b1fc-36b6-4fae-909b-3d721b39dd49\") " pod="openshift-console-operator/console-operator-58897d9998-qv4qp" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.957419 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t6mkz\" (UniqueName: \"kubernetes.io/projected/0d41bec2-af11-4659-9864-a8f5684af5da-kube-api-access-t6mkz\") pod \"package-server-manager-789f6589d5-xbbn2\" (UID: \"0d41bec2-af11-4659-9864-a8f5684af5da\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xbbn2" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.957448 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4jm7l\" (UniqueName: \"kubernetes.io/projected/2541618c-f550-48ef-9316-77a5dd7f1084-kube-api-access-4jm7l\") pod \"marketplace-operator-79b997595-mrptv\" (UID: \"2541618c-f550-48ef-9316-77a5dd7f1084\") " pod="openshift-marketplace/marketplace-operator-79b997595-mrptv" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.957557 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dc0c496c-fa01-4d20-8bb1-0e0a5f0dda44-config-volume\") pod \"dns-default-brdzq\" (UID: \"dc0c496c-fa01-4d20-8bb1-0e0a5f0dda44\") " pod="openshift-dns/dns-default-brdzq" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.957604 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99a12dc3-7eee-4751-9223-8f8d8098d045-config\") pod 
\"kube-apiserver-operator-766d6c64bb-2v8gh\" (UID: \"99a12dc3-7eee-4751-9223-8f8d8098d045\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2v8gh" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.957676 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f7gvc\" (UniqueName: \"kubernetes.io/projected/c909961d-2992-451c-aa4e-4196e1936bbd-kube-api-access-f7gvc\") pod \"migrator-59844c95c7-88mqt\" (UID: \"c909961d-2992-451c-aa4e-4196e1936bbd\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-88mqt" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.958235 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d9c7dbff-42f0-43fe-bf81-e539cb523a77-trusted-ca\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.959204 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d9c7dbff-42f0-43fe-bf81-e539cb523a77-bound-sa-token\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.959243 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64658394-9b30-424c-b243-e997781796e1-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-cwfc8\" (UID: \"64658394-9b30-424c-b243-e997781796e1\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cwfc8" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.959265 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/f85719c9-01af-498b-ba00-7a43b7b923e3-signing-key\") pod \"service-ca-9c57cc56f-sjsrz\" (UID: \"f85719c9-01af-498b-ba00-7a43b7b923e3\") " pod="openshift-service-ca/service-ca-9c57cc56f-sjsrz" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.959404 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.959424 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5191b1fc-36b6-4fae-909b-3d721b39dd49-config\") pod \"console-operator-58897d9998-qv4qp\" (UID: \"5191b1fc-36b6-4fae-909b-3d721b39dd49\") " pod="openshift-console-operator/console-operator-58897d9998-qv4qp" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.959510 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/74ee01ca-ad15-4a8f-8c72-0daf093db217-stats-auth\") pod \"router-default-5444994796-glrgq\" (UID: \"74ee01ca-ad15-4a8f-8c72-0daf093db217\") " pod="openshift-ingress/router-default-5444994796-glrgq" Dec 03 08:42:26 crc 
kubenswrapper[4576]: I1203 08:42:26.959590 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5191b1fc-36b6-4fae-909b-3d721b39dd49-serving-cert\") pod \"console-operator-58897d9998-qv4qp\" (UID: \"5191b1fc-36b6-4fae-909b-3d721b39dd49\") " pod="openshift-console-operator/console-operator-58897d9998-qv4qp" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.959613 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/70305d8f-8bdf-437b-ba4d-aaadc408205d-config-volume\") pod \"collect-profiles-29412510-jwjq6\" (UID: \"70305d8f-8bdf-437b-ba4d-aaadc408205d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412510-jwjq6" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.959724 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kt85t\" (UniqueName: \"kubernetes.io/projected/70305d8f-8bdf-437b-ba4d-aaadc408205d-kube-api-access-kt85t\") pod \"collect-profiles-29412510-jwjq6\" (UID: \"70305d8f-8bdf-437b-ba4d-aaadc408205d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412510-jwjq6" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.959794 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f4scw\" (UniqueName: \"kubernetes.io/projected/d9c7dbff-42f0-43fe-bf81-e539cb523a77-kube-api-access-f4scw\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.959818 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/dfad5e53-c42f-4e6f-8c41-2d1b7b668e29-profile-collector-cert\") pod \"olm-operator-6b444d44fb-vbnmn\" (UID: \"dfad5e53-c42f-4e6f-8c41-2d1b7b668e29\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vbnmn" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.959841 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bj9pz\" (UniqueName: \"kubernetes.io/projected/f85719c9-01af-498b-ba00-7a43b7b923e3-kube-api-access-bj9pz\") pod \"service-ca-9c57cc56f-sjsrz\" (UID: \"f85719c9-01af-498b-ba00-7a43b7b923e3\") " pod="openshift-service-ca/service-ca-9c57cc56f-sjsrz" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.959861 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2cb8c604-b141-47b0-af3e-edf6ac4cd04c-config\") pod \"service-ca-operator-777779d784-p4j4g\" (UID: \"2cb8c604-b141-47b0-af3e-edf6ac4cd04c\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-p4j4g" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.959887 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rg5g5\" (UniqueName: \"kubernetes.io/projected/74ee01ca-ad15-4a8f-8c72-0daf093db217-kube-api-access-rg5g5\") pod \"router-default-5444994796-glrgq\" (UID: \"74ee01ca-ad15-4a8f-8c72-0daf093db217\") " pod="openshift-ingress/router-default-5444994796-glrgq" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.960179 4576 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99a12dc3-7eee-4751-9223-8f8d8098d045-config\") pod \"kube-apiserver-operator-766d6c64bb-2v8gh\" (UID: \"99a12dc3-7eee-4751-9223-8f8d8098d045\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2v8gh" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.961460 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64658394-9b30-424c-b243-e997781796e1-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-cwfc8\" (UID: \"64658394-9b30-424c-b243-e997781796e1\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cwfc8" Dec 03 08:42:26 crc kubenswrapper[4576]: E1203 08:42:26.961605 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:27.461588652 +0000 UTC m=+154.847565636 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.962852 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5191b1fc-36b6-4fae-909b-3d721b39dd49-trusted-ca\") pod \"console-operator-58897d9998-qv4qp\" (UID: \"5191b1fc-36b6-4fae-909b-3d721b39dd49\") " pod="openshift-console-operator/console-operator-58897d9998-qv4qp" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.963085 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/d9c7dbff-42f0-43fe-bf81-e539cb523a77-registry-certificates\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.966677 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/99a12dc3-7eee-4751-9223-8f8d8098d045-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-2v8gh\" (UID: \"99a12dc3-7eee-4751-9223-8f8d8098d045\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2v8gh" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.969397 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/d9c7dbff-42f0-43fe-bf81-e539cb523a77-ca-trust-extracted\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.972953 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/d9c7dbff-42f0-43fe-bf81-e539cb523a77-installation-pull-secrets\") pod \"image-registry-697d97f7c8-hg98m\" (UID: 
\"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.973429 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5191b1fc-36b6-4fae-909b-3d721b39dd49-serving-cert\") pod \"console-operator-58897d9998-qv4qp\" (UID: \"5191b1fc-36b6-4fae-909b-3d721b39dd49\") " pod="openshift-console-operator/console-operator-58897d9998-qv4qp" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.974280 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/64658394-9b30-424c-b243-e997781796e1-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-cwfc8\" (UID: \"64658394-9b30-424c-b243-e997781796e1\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cwfc8" Dec 03 08:42:26 crc kubenswrapper[4576]: E1203 08:42:26.974403 4576 configmap.go:193] Couldn't get configMap openshift-machine-api/kube-rbac-proxy: failed to sync configmap cache: timed out waiting for the condition Dec 03 08:42:26 crc kubenswrapper[4576]: E1203 08:42:26.974462 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/bedc7336-f16c-485b-8cc8-13eea705d68a-config podName:bedc7336-f16c-485b-8cc8-13eea705d68a nodeName:}" failed. No retries permitted until 2025-12-03 08:42:27.974445558 +0000 UTC m=+155.360422542 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/bedc7336-f16c-485b-8cc8-13eea705d68a-config") pod "machine-api-operator-5694c8668f-nsf2s" (UID: "bedc7336-f16c-485b-8cc8-13eea705d68a") : failed to sync configmap cache: timed out waiting for the condition Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.975229 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/d9c7dbff-42f0-43fe-bf81-e539cb523a77-registry-tls\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:26 crc kubenswrapper[4576]: I1203 08:42:26.975921 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.000984 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.022890 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 03 08:42:27 crc kubenswrapper[4576]: W1203 08:42:27.040470 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc82788b8_bd1f_49c5_8f63_e8e360008f40.slice/crio-bd5903104263fc81f3d7d3d4f7bf96e5f9f1ddffa3e09e780522b0162ced15ee WatchSource:0}: Error finding container bd5903104263fc81f3d7d3d4f7bf96e5f9f1ddffa3e09e780522b0162ced15ee: Status 404 returned error can't find the container with id bd5903104263fc81f3d7d3d4f7bf96e5f9f1ddffa3e09e780522b0162ced15ee Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.061196 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:27 crc kubenswrapper[4576]: E1203 08:42:27.061353 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:27.561328852 +0000 UTC m=+154.947305836 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.061391 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2zcfq\" (UniqueName: \"kubernetes.io/projected/fa35292d-7cdf-4bb9-8fb0-82296792a332-kube-api-access-2zcfq\") pod \"machine-config-server-6svzg\" (UID: \"fa35292d-7cdf-4bb9-8fb0-82296792a332\") " pod="openshift-machine-config-operator/machine-config-server-6svzg" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.061426 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2cb8c604-b141-47b0-af3e-edf6ac4cd04c-serving-cert\") pod \"service-ca-operator-777779d784-p4j4g\" (UID: \"2cb8c604-b141-47b0-af3e-edf6ac4cd04c\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-p4j4g" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.061447 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/825049cb-3413-4d9b-81bb-7210dc51a05f-registration-dir\") pod \"csi-hostpathplugin-9xtqh\" (UID: \"825049cb-3413-4d9b-81bb-7210dc51a05f\") " pod="hostpath-provisioner/csi-hostpathplugin-9xtqh" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.061473 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nxwpt\" (UniqueName: \"kubernetes.io/projected/154a9dc5-f312-436b-958f-c78ac4ec740c-kube-api-access-nxwpt\") pod \"multus-admission-controller-857f4d67dd-8rpwp\" (UID: \"154a9dc5-f312-436b-958f-c78ac4ec740c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-8rpwp" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.061488 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/825049cb-3413-4d9b-81bb-7210dc51a05f-csi-data-dir\") pod \"csi-hostpathplugin-9xtqh\" (UID: \"825049cb-3413-4d9b-81bb-7210dc51a05f\") " pod="hostpath-provisioner/csi-hostpathplugin-9xtqh" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.061505 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/84029005-e8f5-4697-9fe2-4057607adb5e-images\") pod \"machine-config-operator-74547568cd-2fhp6\" (UID: \"84029005-e8f5-4697-9fe2-4057607adb5e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2fhp6" Dec 03 08:42:27 crc 
kubenswrapper[4576]: I1203 08:42:27.061520 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/fa35292d-7cdf-4bb9-8fb0-82296792a332-node-bootstrap-token\") pod \"machine-config-server-6svzg\" (UID: \"fa35292d-7cdf-4bb9-8fb0-82296792a332\") " pod="openshift-machine-config-operator/machine-config-server-6svzg" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.061562 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/dc0c496c-fa01-4d20-8bb1-0e0a5f0dda44-metrics-tls\") pod \"dns-default-brdzq\" (UID: \"dc0c496c-fa01-4d20-8bb1-0e0a5f0dda44\") " pod="openshift-dns/dns-default-brdzq" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.061606 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/0d41bec2-af11-4659-9864-a8f5684af5da-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-xbbn2\" (UID: \"0d41bec2-af11-4659-9864-a8f5684af5da\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xbbn2" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.061628 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t6mkz\" (UniqueName: \"kubernetes.io/projected/0d41bec2-af11-4659-9864-a8f5684af5da-kube-api-access-t6mkz\") pod \"package-server-manager-789f6589d5-xbbn2\" (UID: \"0d41bec2-af11-4659-9864-a8f5684af5da\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xbbn2" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.061646 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4jm7l\" (UniqueName: \"kubernetes.io/projected/2541618c-f550-48ef-9316-77a5dd7f1084-kube-api-access-4jm7l\") pod \"marketplace-operator-79b997595-mrptv\" (UID: \"2541618c-f550-48ef-9316-77a5dd7f1084\") " pod="openshift-marketplace/marketplace-operator-79b997595-mrptv" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.061662 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dc0c496c-fa01-4d20-8bb1-0e0a5f0dda44-config-volume\") pod \"dns-default-brdzq\" (UID: \"dc0c496c-fa01-4d20-8bb1-0e0a5f0dda44\") " pod="openshift-dns/dns-default-brdzq" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.061696 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f7gvc\" (UniqueName: \"kubernetes.io/projected/c909961d-2992-451c-aa4e-4196e1936bbd-kube-api-access-f7gvc\") pod \"migrator-59844c95c7-88mqt\" (UID: \"c909961d-2992-451c-aa4e-4196e1936bbd\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-88mqt" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.061720 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/f85719c9-01af-498b-ba00-7a43b7b923e3-signing-key\") pod \"service-ca-9c57cc56f-sjsrz\" (UID: \"f85719c9-01af-498b-ba00-7a43b7b923e3\") " pod="openshift-service-ca/service-ca-9c57cc56f-sjsrz" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.066112 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.067229 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/74ee01ca-ad15-4a8f-8c72-0daf093db217-stats-auth\") pod \"router-default-5444994796-glrgq\" (UID: \"74ee01ca-ad15-4a8f-8c72-0daf093db217\") " pod="openshift-ingress/router-default-5444994796-glrgq" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.067291 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/70305d8f-8bdf-437b-ba4d-aaadc408205d-config-volume\") pod \"collect-profiles-29412510-jwjq6\" (UID: \"70305d8f-8bdf-437b-ba4d-aaadc408205d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412510-jwjq6" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.067338 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kt85t\" (UniqueName: \"kubernetes.io/projected/70305d8f-8bdf-437b-ba4d-aaadc408205d-kube-api-access-kt85t\") pod \"collect-profiles-29412510-jwjq6\" (UID: \"70305d8f-8bdf-437b-ba4d-aaadc408205d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412510-jwjq6" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.067371 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/dfad5e53-c42f-4e6f-8c41-2d1b7b668e29-profile-collector-cert\") pod \"olm-operator-6b444d44fb-vbnmn\" (UID: \"dfad5e53-c42f-4e6f-8c41-2d1b7b668e29\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vbnmn" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.068174 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/70305d8f-8bdf-437b-ba4d-aaadc408205d-config-volume\") pod \"collect-profiles-29412510-jwjq6\" (UID: \"70305d8f-8bdf-437b-ba4d-aaadc408205d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412510-jwjq6" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.068754 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dc0c496c-fa01-4d20-8bb1-0e0a5f0dda44-config-volume\") pod \"dns-default-brdzq\" (UID: \"dc0c496c-fa01-4d20-8bb1-0e0a5f0dda44\") " pod="openshift-dns/dns-default-brdzq" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.069612 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/825049cb-3413-4d9b-81bb-7210dc51a05f-registration-dir\") pod \"csi-hostpathplugin-9xtqh\" (UID: \"825049cb-3413-4d9b-81bb-7210dc51a05f\") " pod="hostpath-provisioner/csi-hostpathplugin-9xtqh" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.070123 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/84029005-e8f5-4697-9fe2-4057607adb5e-images\") pod \"machine-config-operator-74547568cd-2fhp6\" (UID: \"84029005-e8f5-4697-9fe2-4057607adb5e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2fhp6" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 
08:42:27.070877 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/825049cb-3413-4d9b-81bb-7210dc51a05f-csi-data-dir\") pod \"csi-hostpathplugin-9xtqh\" (UID: \"825049cb-3413-4d9b-81bb-7210dc51a05f\") " pod="hostpath-provisioner/csi-hostpathplugin-9xtqh" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.073819 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bj9pz\" (UniqueName: \"kubernetes.io/projected/f85719c9-01af-498b-ba00-7a43b7b923e3-kube-api-access-bj9pz\") pod \"service-ca-9c57cc56f-sjsrz\" (UID: \"f85719c9-01af-498b-ba00-7a43b7b923e3\") " pod="openshift-service-ca/service-ca-9c57cc56f-sjsrz" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.073917 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2cb8c604-b141-47b0-af3e-edf6ac4cd04c-config\") pod \"service-ca-operator-777779d784-p4j4g\" (UID: \"2cb8c604-b141-47b0-af3e-edf6ac4cd04c\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-p4j4g" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.073950 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rg5g5\" (UniqueName: \"kubernetes.io/projected/74ee01ca-ad15-4a8f-8c72-0daf093db217-kube-api-access-rg5g5\") pod \"router-default-5444994796-glrgq\" (UID: \"74ee01ca-ad15-4a8f-8c72-0daf093db217\") " pod="openshift-ingress/router-default-5444994796-glrgq" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.073981 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/74ee01ca-ad15-4a8f-8c72-0daf093db217-metrics-certs\") pod \"router-default-5444994796-glrgq\" (UID: \"74ee01ca-ad15-4a8f-8c72-0daf093db217\") " pod="openshift-ingress/router-default-5444994796-glrgq" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.074006 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/7b548daf-86aa-4fb1-86a8-3064794876c1-profile-collector-cert\") pod \"catalog-operator-68c6474976-lcjhm\" (UID: \"7b548daf-86aa-4fb1-86a8-3064794876c1\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-lcjhm" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.074033 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8zqxg\" (UniqueName: \"kubernetes.io/projected/2cb8c604-b141-47b0-af3e-edf6ac4cd04c-kube-api-access-8zqxg\") pod \"service-ca-operator-777779d784-p4j4g\" (UID: \"2cb8c604-b141-47b0-af3e-edf6ac4cd04c\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-p4j4g" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.074056 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-shx27\" (UniqueName: \"kubernetes.io/projected/b09df583-33ba-444d-aa5e-62158b7a3f80-kube-api-access-shx27\") pod \"machine-config-controller-84d6567774-v4pcv\" (UID: \"b09df583-33ba-444d-aa5e-62158b7a3f80\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-v4pcv" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.074088 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/fa35292d-7cdf-4bb9-8fb0-82296792a332-certs\") pod 
\"machine-config-server-6svzg\" (UID: \"fa35292d-7cdf-4bb9-8fb0-82296792a332\") " pod="openshift-machine-config-operator/machine-config-server-6svzg" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.074112 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/825049cb-3413-4d9b-81bb-7210dc51a05f-socket-dir\") pod \"csi-hostpathplugin-9xtqh\" (UID: \"825049cb-3413-4d9b-81bb-7210dc51a05f\") " pod="hostpath-provisioner/csi-hostpathplugin-9xtqh" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.074137 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/70305d8f-8bdf-437b-ba4d-aaadc408205d-secret-volume\") pod \"collect-profiles-29412510-jwjq6\" (UID: \"70305d8f-8bdf-437b-ba4d-aaadc408205d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412510-jwjq6" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.074180 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b3bdb0c8-1a8b-49fe-af23-0704abbeb3c2-apiservice-cert\") pod \"packageserver-d55dfcdfc-9825w\" (UID: \"b3bdb0c8-1a8b-49fe-af23-0704abbeb3c2\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9825w" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.074195 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/f85719c9-01af-498b-ba00-7a43b7b923e3-signing-cabundle\") pod \"service-ca-9c57cc56f-sjsrz\" (UID: \"f85719c9-01af-498b-ba00-7a43b7b923e3\") " pod="openshift-service-ca/service-ca-9c57cc56f-sjsrz" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.074353 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qzvcj\" (UniqueName: \"kubernetes.io/projected/b3bdb0c8-1a8b-49fe-af23-0704abbeb3c2-kube-api-access-qzvcj\") pod \"packageserver-d55dfcdfc-9825w\" (UID: \"b3bdb0c8-1a8b-49fe-af23-0704abbeb3c2\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9825w" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.074380 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b09df583-33ba-444d-aa5e-62158b7a3f80-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-v4pcv\" (UID: \"b09df583-33ba-444d-aa5e-62158b7a3f80\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-v4pcv" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.074565 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/825049cb-3413-4d9b-81bb-7210dc51a05f-plugins-dir\") pod \"csi-hostpathplugin-9xtqh\" (UID: \"825049cb-3413-4d9b-81bb-7210dc51a05f\") " pod="hostpath-provisioner/csi-hostpathplugin-9xtqh" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.074585 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/154a9dc5-f312-436b-958f-c78ac4ec740c-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-8rpwp\" (UID: \"154a9dc5-f312-436b-958f-c78ac4ec740c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-8rpwp" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.074619 4576 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b09df583-33ba-444d-aa5e-62158b7a3f80-proxy-tls\") pod \"machine-config-controller-84d6567774-v4pcv\" (UID: \"b09df583-33ba-444d-aa5e-62158b7a3f80\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-v4pcv" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.074636 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/2541618c-f550-48ef-9316-77a5dd7f1084-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-mrptv\" (UID: \"2541618c-f550-48ef-9316-77a5dd7f1084\") " pod="openshift-marketplace/marketplace-operator-79b997595-mrptv" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.074777 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/b3bdb0c8-1a8b-49fe-af23-0704abbeb3c2-tmpfs\") pod \"packageserver-d55dfcdfc-9825w\" (UID: \"b3bdb0c8-1a8b-49fe-af23-0704abbeb3c2\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9825w" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.074808 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/84029005-e8f5-4697-9fe2-4057607adb5e-proxy-tls\") pod \"machine-config-operator-74547568cd-2fhp6\" (UID: \"84029005-e8f5-4697-9fe2-4057607adb5e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2fhp6" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.074936 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/19be1a82-6279-466c-a719-e346d59597be-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-kbnz6\" (UID: \"19be1a82-6279-466c-a719-e346d59597be\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kbnz6" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.074968 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwrnq\" (UniqueName: \"kubernetes.io/projected/19be1a82-6279-466c-a719-e346d59597be-kube-api-access-xwrnq\") pod \"control-plane-machine-set-operator-78cbb6b69f-kbnz6\" (UID: \"19be1a82-6279-466c-a719-e346d59597be\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kbnz6" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.075078 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/825049cb-3413-4d9b-81bb-7210dc51a05f-mountpoint-dir\") pod \"csi-hostpathplugin-9xtqh\" (UID: \"825049cb-3413-4d9b-81bb-7210dc51a05f\") " pod="hostpath-provisioner/csi-hostpathplugin-9xtqh" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.075097 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/74ee01ca-ad15-4a8f-8c72-0daf093db217-service-ca-bundle\") pod \"router-default-5444994796-glrgq\" (UID: \"74ee01ca-ad15-4a8f-8c72-0daf093db217\") " pod="openshift-ingress/router-default-5444994796-glrgq" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.075180 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t7c6l\" 
(UniqueName: \"kubernetes.io/projected/dfad5e53-c42f-4e6f-8c41-2d1b7b668e29-kube-api-access-t7c6l\") pod \"olm-operator-6b444d44fb-vbnmn\" (UID: \"dfad5e53-c42f-4e6f-8c41-2d1b7b668e29\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vbnmn" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.075309 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f5ebf9cc-8827-431f-9a1b-f6c755a4a460-cert\") pod \"ingress-canary-28lz2\" (UID: \"f5ebf9cc-8827-431f-9a1b-f6c755a4a460\") " pod="openshift-ingress-canary/ingress-canary-28lz2" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.075333 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2541618c-f550-48ef-9316-77a5dd7f1084-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-mrptv\" (UID: \"2541618c-f550-48ef-9316-77a5dd7f1084\") " pod="openshift-marketplace/marketplace-operator-79b997595-mrptv" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.075395 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/dfad5e53-c42f-4e6f-8c41-2d1b7b668e29-srv-cert\") pod \"olm-operator-6b444d44fb-vbnmn\" (UID: \"dfad5e53-c42f-4e6f-8c41-2d1b7b668e29\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vbnmn" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.075414 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v77b9\" (UniqueName: \"kubernetes.io/projected/84029005-e8f5-4697-9fe2-4057607adb5e-kube-api-access-v77b9\") pod \"machine-config-operator-74547568cd-2fhp6\" (UID: \"84029005-e8f5-4697-9fe2-4057607adb5e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2fhp6" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.075494 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mk565\" (UniqueName: \"kubernetes.io/projected/f5ebf9cc-8827-431f-9a1b-f6c755a4a460-kube-api-access-mk565\") pod \"ingress-canary-28lz2\" (UID: \"f5ebf9cc-8827-431f-9a1b-f6c755a4a460\") " pod="openshift-ingress-canary/ingress-canary-28lz2" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.075515 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b3bdb0c8-1a8b-49fe-af23-0704abbeb3c2-webhook-cert\") pod \"packageserver-d55dfcdfc-9825w\" (UID: \"b3bdb0c8-1a8b-49fe-af23-0704abbeb3c2\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9825w" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.075663 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m52j5\" (UniqueName: \"kubernetes.io/projected/dc0c496c-fa01-4d20-8bb1-0e0a5f0dda44-kube-api-access-m52j5\") pod \"dns-default-brdzq\" (UID: \"dc0c496c-fa01-4d20-8bb1-0e0a5f0dda44\") " pod="openshift-dns/dns-default-brdzq" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.075824 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/7b548daf-86aa-4fb1-86a8-3064794876c1-srv-cert\") pod \"catalog-operator-68c6474976-lcjhm\" (UID: \"7b548daf-86aa-4fb1-86a8-3064794876c1\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-lcjhm" Dec 03 08:42:27 crc 
kubenswrapper[4576]: I1203 08:42:27.075862 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-psq99\" (UniqueName: \"kubernetes.io/projected/825049cb-3413-4d9b-81bb-7210dc51a05f-kube-api-access-psq99\") pod \"csi-hostpathplugin-9xtqh\" (UID: \"825049cb-3413-4d9b-81bb-7210dc51a05f\") " pod="hostpath-provisioner/csi-hostpathplugin-9xtqh" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.080681 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/f85719c9-01af-498b-ba00-7a43b7b923e3-signing-key\") pod \"service-ca-9c57cc56f-sjsrz\" (UID: \"f85719c9-01af-498b-ba00-7a43b7b923e3\") " pod="openshift-service-ca/service-ca-9c57cc56f-sjsrz" Dec 03 08:42:27 crc kubenswrapper[4576]: E1203 08:42:27.085969 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:27.585911502 +0000 UTC m=+154.971888486 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.091172 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2cb8c604-b141-47b0-af3e-edf6ac4cd04c-config\") pod \"service-ca-operator-777779d784-p4j4g\" (UID: \"2cb8c604-b141-47b0-af3e-edf6ac4cd04c\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-p4j4g" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.092863 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/99a12dc3-7eee-4751-9223-8f8d8098d045-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-2v8gh\" (UID: \"99a12dc3-7eee-4751-9223-8f8d8098d045\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2v8gh" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.097018 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/825049cb-3413-4d9b-81bb-7210dc51a05f-socket-dir\") pod \"csi-hostpathplugin-9xtqh\" (UID: \"825049cb-3413-4d9b-81bb-7210dc51a05f\") " pod="hostpath-provisioner/csi-hostpathplugin-9xtqh" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.100319 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fn5w9\" (UniqueName: \"kubernetes.io/projected/7b548daf-86aa-4fb1-86a8-3064794876c1-kube-api-access-fn5w9\") pod \"catalog-operator-68c6474976-lcjhm\" (UID: \"7b548daf-86aa-4fb1-86a8-3064794876c1\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-lcjhm" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.100385 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/74ee01ca-ad15-4a8f-8c72-0daf093db217-default-certificate\") pod \"router-default-5444994796-glrgq\" (UID: 
\"74ee01ca-ad15-4a8f-8c72-0daf093db217\") " pod="openshift-ingress/router-default-5444994796-glrgq" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.100602 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/84029005-e8f5-4697-9fe2-4057607adb5e-auth-proxy-config\") pod \"machine-config-operator-74547568cd-2fhp6\" (UID: \"84029005-e8f5-4697-9fe2-4057607adb5e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2fhp6" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.103500 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/84029005-e8f5-4697-9fe2-4057607adb5e-auth-proxy-config\") pod \"machine-config-operator-74547568cd-2fhp6\" (UID: \"84029005-e8f5-4697-9fe2-4057607adb5e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2fhp6" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.105102 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/f85719c9-01af-498b-ba00-7a43b7b923e3-signing-cabundle\") pod \"service-ca-9c57cc56f-sjsrz\" (UID: \"f85719c9-01af-498b-ba00-7a43b7b923e3\") " pod="openshift-service-ca/service-ca-9c57cc56f-sjsrz" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.108230 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b09df583-33ba-444d-aa5e-62158b7a3f80-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-v4pcv\" (UID: \"b09df583-33ba-444d-aa5e-62158b7a3f80\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-v4pcv" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.108282 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/825049cb-3413-4d9b-81bb-7210dc51a05f-plugins-dir\") pod \"csi-hostpathplugin-9xtqh\" (UID: \"825049cb-3413-4d9b-81bb-7210dc51a05f\") " pod="hostpath-provisioner/csi-hostpathplugin-9xtqh" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.108482 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2541618c-f550-48ef-9316-77a5dd7f1084-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-mrptv\" (UID: \"2541618c-f550-48ef-9316-77a5dd7f1084\") " pod="openshift-marketplace/marketplace-operator-79b997595-mrptv" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.113284 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/dc0c496c-fa01-4d20-8bb1-0e0a5f0dda44-metrics-tls\") pod \"dns-default-brdzq\" (UID: \"dc0c496c-fa01-4d20-8bb1-0e0a5f0dda44\") " pod="openshift-dns/dns-default-brdzq" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.118245 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/154a9dc5-f312-436b-958f-c78ac4ec740c-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-8rpwp\" (UID: \"154a9dc5-f312-436b-958f-c78ac4ec740c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-8rpwp" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.118721 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" 
(UniqueName: \"kubernetes.io/secret/dfad5e53-c42f-4e6f-8c41-2d1b7b668e29-profile-collector-cert\") pod \"olm-operator-6b444d44fb-vbnmn\" (UID: \"dfad5e53-c42f-4e6f-8c41-2d1b7b668e29\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vbnmn" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.118798 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2cb8c604-b141-47b0-af3e-edf6ac4cd04c-serving-cert\") pod \"service-ca-operator-777779d784-p4j4g\" (UID: \"2cb8c604-b141-47b0-af3e-edf6ac4cd04c\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-p4j4g" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.119212 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/7b548daf-86aa-4fb1-86a8-3064794876c1-profile-collector-cert\") pod \"catalog-operator-68c6474976-lcjhm\" (UID: \"7b548daf-86aa-4fb1-86a8-3064794876c1\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-lcjhm" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.119893 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/dfad5e53-c42f-4e6f-8c41-2d1b7b668e29-srv-cert\") pod \"olm-operator-6b444d44fb-vbnmn\" (UID: \"dfad5e53-c42f-4e6f-8c41-2d1b7b668e29\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vbnmn" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.122078 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/fa35292d-7cdf-4bb9-8fb0-82296792a332-node-bootstrap-token\") pod \"machine-config-server-6svzg\" (UID: \"fa35292d-7cdf-4bb9-8fb0-82296792a332\") " pod="openshift-machine-config-operator/machine-config-server-6svzg" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.125203 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/fa35292d-7cdf-4bb9-8fb0-82296792a332-certs\") pod \"machine-config-server-6svzg\" (UID: \"fa35292d-7cdf-4bb9-8fb0-82296792a332\") " pod="openshift-machine-config-operator/machine-config-server-6svzg" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.125943 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/825049cb-3413-4d9b-81bb-7210dc51a05f-mountpoint-dir\") pod \"csi-hostpathplugin-9xtqh\" (UID: \"825049cb-3413-4d9b-81bb-7210dc51a05f\") " pod="hostpath-provisioner/csi-hostpathplugin-9xtqh" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.127221 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/70305d8f-8bdf-437b-ba4d-aaadc408205d-secret-volume\") pod \"collect-profiles-29412510-jwjq6\" (UID: \"70305d8f-8bdf-437b-ba4d-aaadc408205d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412510-jwjq6" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.127406 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/74ee01ca-ad15-4a8f-8c72-0daf093db217-default-certificate\") pod \"router-default-5444994796-glrgq\" (UID: \"74ee01ca-ad15-4a8f-8c72-0daf093db217\") " pod="openshift-ingress/router-default-5444994796-glrgq" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.129003 4576 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/84029005-e8f5-4697-9fe2-4057607adb5e-proxy-tls\") pod \"machine-config-operator-74547568cd-2fhp6\" (UID: \"84029005-e8f5-4697-9fe2-4057607adb5e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2fhp6" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.146554 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/b3bdb0c8-1a8b-49fe-af23-0704abbeb3c2-tmpfs\") pod \"packageserver-d55dfcdfc-9825w\" (UID: \"b3bdb0c8-1a8b-49fe-af23-0704abbeb3c2\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9825w" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.146949 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b3bdb0c8-1a8b-49fe-af23-0704abbeb3c2-webhook-cert\") pod \"packageserver-d55dfcdfc-9825w\" (UID: \"b3bdb0c8-1a8b-49fe-af23-0704abbeb3c2\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9825w" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.148146 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/0d41bec2-af11-4659-9864-a8f5684af5da-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-xbbn2\" (UID: \"0d41bec2-af11-4659-9864-a8f5684af5da\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xbbn2" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.148214 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/7b548daf-86aa-4fb1-86a8-3064794876c1-srv-cert\") pod \"catalog-operator-68c6474976-lcjhm\" (UID: \"7b548daf-86aa-4fb1-86a8-3064794876c1\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-lcjhm" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.149481 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f5ebf9cc-8827-431f-9a1b-f6c755a4a460-cert\") pod \"ingress-canary-28lz2\" (UID: \"f5ebf9cc-8827-431f-9a1b-f6c755a4a460\") " pod="openshift-ingress-canary/ingress-canary-28lz2" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.151344 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/74ee01ca-ad15-4a8f-8c72-0daf093db217-service-ca-bundle\") pod \"router-default-5444994796-glrgq\" (UID: \"74ee01ca-ad15-4a8f-8c72-0daf093db217\") " pod="openshift-ingress/router-default-5444994796-glrgq" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.154005 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b09df583-33ba-444d-aa5e-62158b7a3f80-proxy-tls\") pod \"machine-config-controller-84d6567774-v4pcv\" (UID: \"b09df583-33ba-444d-aa5e-62158b7a3f80\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-v4pcv" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.155397 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b3bdb0c8-1a8b-49fe-af23-0704abbeb3c2-apiservice-cert\") pod \"packageserver-d55dfcdfc-9825w\" (UID: \"b3bdb0c8-1a8b-49fe-af23-0704abbeb3c2\") " 
pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9825w" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.155811 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/74ee01ca-ad15-4a8f-8c72-0daf093db217-stats-auth\") pod \"router-default-5444994796-glrgq\" (UID: \"74ee01ca-ad15-4a8f-8c72-0daf093db217\") " pod="openshift-ingress/router-default-5444994796-glrgq" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.164494 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-q56k8"] Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.158742 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/64658394-9b30-424c-b243-e997781796e1-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-cwfc8\" (UID: \"64658394-9b30-424c-b243-e997781796e1\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cwfc8" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.167845 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nhzmk\" (UniqueName: \"kubernetes.io/projected/5191b1fc-36b6-4fae-909b-3d721b39dd49-kube-api-access-nhzmk\") pod \"console-operator-58897d9998-qv4qp\" (UID: \"5191b1fc-36b6-4fae-909b-3d721b39dd49\") " pod="openshift-console-operator/console-operator-58897d9998-qv4qp" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.169431 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vcfvq\" (UniqueName: \"kubernetes.io/projected/974495e5-ca1d-4c68-9184-244d7c08276b-kube-api-access-vcfvq\") pod \"kube-storage-version-migrator-operator-b67b599dd-9nt6z\" (UID: \"974495e5-ca1d-4c68-9184-244d7c08276b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-9nt6z" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.169829 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/2541618c-f550-48ef-9316-77a5dd7f1084-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-mrptv\" (UID: \"2541618c-f550-48ef-9316-77a5dd7f1084\") " pod="openshift-marketplace/marketplace-operator-79b997595-mrptv" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.170246 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/19be1a82-6279-466c-a719-e346d59597be-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-kbnz6\" (UID: \"19be1a82-6279-466c-a719-e346d59597be\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kbnz6" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.175985 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d9c7dbff-42f0-43fe-bf81-e539cb523a77-bound-sa-token\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.192152 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: 
\"kubernetes.io/secret/74ee01ca-ad15-4a8f-8c72-0daf093db217-metrics-certs\") pod \"router-default-5444994796-glrgq\" (UID: \"74ee01ca-ad15-4a8f-8c72-0daf093db217\") " pod="openshift-ingress/router-default-5444994796-glrgq" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.198360 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f4scw\" (UniqueName: \"kubernetes.io/projected/d9c7dbff-42f0-43fe-bf81-e539cb523a77-kube-api-access-f4scw\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.203225 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:27 crc kubenswrapper[4576]: W1203 08:42:27.203720 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9444d9cb_fe03_4f32_ad40_84901ecfb0d9.slice/crio-390b4dd845b05495e9c48fba7d5098de5d1e28b8889aadbc6f0d6e56e7925988 WatchSource:0}: Error finding container 390b4dd845b05495e9c48fba7d5098de5d1e28b8889aadbc6f0d6e56e7925988: Status 404 returned error can't find the container with id 390b4dd845b05495e9c48fba7d5098de5d1e28b8889aadbc6f0d6e56e7925988 Dec 03 08:42:27 crc kubenswrapper[4576]: E1203 08:42:27.203785 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:27.703764307 +0000 UTC m=+155.089741291 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.220654 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-l27nx"] Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.222328 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f7gvc\" (UniqueName: \"kubernetes.io/projected/c909961d-2992-451c-aa4e-4196e1936bbd-kube-api-access-f7gvc\") pod \"migrator-59844c95c7-88mqt\" (UID: \"c909961d-2992-451c-aa4e-4196e1936bbd\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-88mqt" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.235321 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t6mkz\" (UniqueName: \"kubernetes.io/projected/0d41bec2-af11-4659-9864-a8f5684af5da-kube-api-access-t6mkz\") pod \"package-server-manager-789f6589d5-xbbn2\" (UID: \"0d41bec2-af11-4659-9864-a8f5684af5da\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xbbn2" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.247187 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-qv4qp" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.249985 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4jm7l\" (UniqueName: \"kubernetes.io/projected/2541618c-f550-48ef-9316-77a5dd7f1084-kube-api-access-4jm7l\") pod \"marketplace-operator-79b997595-mrptv\" (UID: \"2541618c-f550-48ef-9316-77a5dd7f1084\") " pod="openshift-marketplace/marketplace-operator-79b997595-mrptv" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.260805 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2v8gh" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.281270 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2zcfq\" (UniqueName: \"kubernetes.io/projected/fa35292d-7cdf-4bb9-8fb0-82296792a332-kube-api-access-2zcfq\") pod \"machine-config-server-6svzg\" (UID: \"fa35292d-7cdf-4bb9-8fb0-82296792a332\") " pod="openshift-machine-config-operator/machine-config-server-6svzg" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.294716 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nxwpt\" (UniqueName: \"kubernetes.io/projected/154a9dc5-f312-436b-958f-c78ac4ec740c-kube-api-access-nxwpt\") pod \"multus-admission-controller-857f4d67dd-8rpwp\" (UID: \"154a9dc5-f312-436b-958f-c78ac4ec740c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-8rpwp" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.301220 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5jnbz" event={"ID":"c212c1e9-bf50-4619-b4a0-7ae0659f7cab","Type":"ContainerStarted","Data":"553a72810cac753472987081d33ef1553a5e6ace216c3abfe0801c79e64763c0"} Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.302317 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-6tzl7" event={"ID":"163752d1-e68c-43c0-b869-2d7755551a1d","Type":"ContainerStarted","Data":"04dcbfec2395e99435c261b290a60155426e6313e64591c0c49812b0a44f5493"} Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.304829 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:27 crc kubenswrapper[4576]: E1203 08:42:27.305341 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:27.805326475 +0000 UTC m=+155.191303459 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.309949 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-74x2q" event={"ID":"7886546d-9341-4fcc-a861-d78d9fa35e98","Type":"ContainerStarted","Data":"8ff554c23b1ba4b69e5250909c215f5ab8f4811c4ff80f154ea6a0f144dd7538"} Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.316611 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kt85t\" (UniqueName: \"kubernetes.io/projected/70305d8f-8bdf-437b-ba4d-aaadc408205d-kube-api-access-kt85t\") pod \"collect-profiles-29412510-jwjq6\" (UID: \"70305d8f-8bdf-437b-ba4d-aaadc408205d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412510-jwjq6" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.318402 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-9nt6z" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.330273 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-shx27\" (UniqueName: \"kubernetes.io/projected/b09df583-33ba-444d-aa5e-62158b7a3f80-kube-api-access-shx27\") pod \"machine-config-controller-84d6567774-v4pcv\" (UID: \"b09df583-33ba-444d-aa5e-62158b7a3f80\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-v4pcv" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.338636 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cwfc8" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.362990 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-v4pcv" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.364342 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-88mqt" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.374402 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xbbn2" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.378611 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rg5g5\" (UniqueName: \"kubernetes.io/projected/74ee01ca-ad15-4a8f-8c72-0daf093db217-kube-api-access-rg5g5\") pod \"router-default-5444994796-glrgq\" (UID: \"74ee01ca-ad15-4a8f-8c72-0daf093db217\") " pod="openshift-ingress/router-default-5444994796-glrgq" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.382647 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bj9pz\" (UniqueName: \"kubernetes.io/projected/f85719c9-01af-498b-ba00-7a43b7b923e3-kube-api-access-bj9pz\") pod \"service-ca-9c57cc56f-sjsrz\" (UID: \"f85719c9-01af-498b-ba00-7a43b7b923e3\") " pod="openshift-service-ca/service-ca-9c57cc56f-sjsrz" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.386994 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t7c6l\" (UniqueName: \"kubernetes.io/projected/dfad5e53-c42f-4e6f-8c41-2d1b7b668e29-kube-api-access-t7c6l\") pod \"olm-operator-6b444d44fb-vbnmn\" (UID: \"dfad5e53-c42f-4e6f-8c41-2d1b7b668e29\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vbnmn" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.396770 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-glrgq" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.400864 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-km29k" event={"ID":"cdcaf314-ef4a-4f8e-90fb-a597bcd1f343","Type":"ContainerStarted","Data":"fecfd15b802de61a1a9b30fa31bc6bb4786f1286db2343e8da28e32fef4bb18c"} Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.400909 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-km29k" event={"ID":"cdcaf314-ef4a-4f8e-90fb-a597bcd1f343","Type":"ContainerStarted","Data":"68a752e2102b8abfbce82650c6ed176893712461426027f30fade658a25e5a16"} Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.401858 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-km29k" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.409214 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-q56k8" event={"ID":"9444d9cb-fe03-4f32-ad40-84901ecfb0d9","Type":"ContainerStarted","Data":"390b4dd845b05495e9c48fba7d5098de5d1e28b8889aadbc6f0d6e56e7925988"} Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.410113 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gnxs8" event={"ID":"c82788b8-bd1f-49c5-8f63-e8e360008f40","Type":"ContainerStarted","Data":"bd5903104263fc81f3d7d3d4f7bf96e5f9f1ddffa3e09e780522b0162ced15ee"} Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.411296 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" event={"ID":"a2991975-3b10-4f7d-aa48-750e3c402870","Type":"ContainerStarted","Data":"e482b9c26690d11c937f503bbc0b4c8f87e75c2ad3fde26df140cb6953290c50"} Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.412058 4576 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.416160 4576 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-km29k container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.416204 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-km29k" podUID="cdcaf314-ef4a-4f8e-90fb-a597bcd1f343" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.416340 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-8rpwp" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.416642 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:27 crc kubenswrapper[4576]: E1203 08:42:27.416870 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:27.916849971 +0000 UTC m=+155.302826955 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.416923 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8zqxg\" (UniqueName: \"kubernetes.io/projected/2cb8c604-b141-47b0-af3e-edf6ac4cd04c-kube-api-access-8zqxg\") pod \"service-ca-operator-777779d784-p4j4g\" (UID: \"2cb8c604-b141-47b0-af3e-edf6ac4cd04c\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-p4j4g" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.416974 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fp52h\" (UniqueName: \"kubernetes.io/projected/e82ee5bd-9dd0-4d09-a629-21a1d08dbb06-kube-api-access-fp52h\") pod \"openshift-apiserver-operator-796bbdcf4f-lskhl\" (UID: \"e82ee5bd-9dd0-4d09-a629-21a1d08dbb06\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lskhl" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.417070 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.417102 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f5rj2\" (UniqueName: \"kubernetes.io/projected/bedc7336-f16c-485b-8cc8-13eea705d68a-kube-api-access-f5rj2\") pod \"machine-api-operator-5694c8668f-nsf2s\" (UID: \"bedc7336-f16c-485b-8cc8-13eea705d68a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nsf2s" Dec 03 08:42:27 crc kubenswrapper[4576]: E1203 08:42:27.417563 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:27.917552569 +0000 UTC m=+155.303529553 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.423719 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412510-jwjq6" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.424826 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f5rj2\" (UniqueName: \"kubernetes.io/projected/bedc7336-f16c-485b-8cc8-13eea705d68a-kube-api-access-f5rj2\") pod \"machine-api-operator-5694c8668f-nsf2s\" (UID: \"bedc7336-f16c-485b-8cc8-13eea705d68a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nsf2s" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.436446 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fp52h\" (UniqueName: \"kubernetes.io/projected/e82ee5bd-9dd0-4d09-a629-21a1d08dbb06-kube-api-access-fp52h\") pod \"openshift-apiserver-operator-796bbdcf4f-lskhl\" (UID: \"e82ee5bd-9dd0-4d09-a629-21a1d08dbb06\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lskhl" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.436996 4576 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-76tfl container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.12:6443/healthz\": dial tcp 10.217.0.12:6443: connect: connection refused" start-of-body= Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.437050 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" podUID="a2991975-3b10-4f7d-aa48-750e3c402870" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.12:6443/healthz\": dial tcp 10.217.0.12:6443: connect: connection refused" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.437777 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-t7p55" event={"ID":"fe3569cb-a99a-4e1a-8b5f-124e68276330","Type":"ContainerStarted","Data":"2b78f9cf0d0807ebfc295c49d793101a668020b4983be37789bfc3f487a9658c"} Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.445238 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-h7ncw"] Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.450822 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mk565\" (UniqueName: \"kubernetes.io/projected/f5ebf9cc-8827-431f-9a1b-f6c755a4a460-kube-api-access-mk565\") pod \"ingress-canary-28lz2\" (UID: \"f5ebf9cc-8827-431f-9a1b-f6c755a4a460\") " pod="openshift-ingress-canary/ingress-canary-28lz2" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.451759 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v77b9\" (UniqueName: \"kubernetes.io/projected/84029005-e8f5-4697-9fe2-4057607adb5e-kube-api-access-v77b9\") pod \"machine-config-operator-74547568cd-2fhp6\" (UID: \"84029005-e8f5-4697-9fe2-4057607adb5e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2fhp6" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.452558 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-x5pdd" event={"ID":"098beae0-8e26-45b7-bf18-cf6e4c83d7c7","Type":"ContainerStarted","Data":"aff8c7ff1989313fbdda1cb774e5c5949bf6130b91d7df1ef7207f959ba2cc3e"} Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.454513 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-22vzn" event={"ID":"c57d4112-6506-4f4c-86c2-c6d1249df640","Type":"ContainerStarted","Data":"90dabdccd1cb982d44c364b8ef6785caadc0989f529e438b30e380bb4370c636"} Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.458472 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-hhd5z" event={"ID":"5375d73e-a10f-4e11-a6a7-9ec01e8a60ea","Type":"ContainerStarted","Data":"3999910c9d8859f01f3a42370384e2f89be8777a45c114dbef606ef8b9413a77"} Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.472171 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m52j5\" (UniqueName: \"kubernetes.io/projected/dc0c496c-fa01-4d20-8bb1-0e0a5f0dda44-kube-api-access-m52j5\") pod \"dns-default-brdzq\" (UID: \"dc0c496c-fa01-4d20-8bb1-0e0a5f0dda44\") " pod="openshift-dns/dns-default-brdzq" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.487388 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-mrptv" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.491216 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-jqrv8"] Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.494047 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-psq99\" (UniqueName: \"kubernetes.io/projected/825049cb-3413-4d9b-81bb-7210dc51a05f-kube-api-access-psq99\") pod \"csi-hostpathplugin-9xtqh\" (UID: \"825049cb-3413-4d9b-81bb-7210dc51a05f\") " pod="hostpath-provisioner/csi-hostpathplugin-9xtqh" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.501474 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vbnmn" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.509256 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2fhp6" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.512780 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-p4j4g" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.518229 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:27 crc kubenswrapper[4576]: E1203 08:42:27.519634 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:28.019616181 +0000 UTC m=+155.405593165 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.522500 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-brdzq" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.531568 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-28lz2" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.532231 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-6qg8m"] Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.535544 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fn5w9\" (UniqueName: \"kubernetes.io/projected/7b548daf-86aa-4fb1-86a8-3064794876c1-kube-api-access-fn5w9\") pod \"catalog-operator-68c6474976-lcjhm\" (UID: \"7b548daf-86aa-4fb1-86a8-3064794876c1\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-lcjhm" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.545244 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-6svzg" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.547385 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qzvcj\" (UniqueName: \"kubernetes.io/projected/b3bdb0c8-1a8b-49fe-af23-0704abbeb3c2-kube-api-access-qzvcj\") pod \"packageserver-d55dfcdfc-9825w\" (UID: \"b3bdb0c8-1a8b-49fe-af23-0704abbeb3c2\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9825w" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.548600 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tjv2m"] Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.558541 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xwrnq\" (UniqueName: \"kubernetes.io/projected/19be1a82-6279-466c-a719-e346d59597be-kube-api-access-xwrnq\") pod \"control-plane-machine-set-operator-78cbb6b69f-kbnz6\" (UID: \"19be1a82-6279-466c-a719-e346d59597be\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kbnz6" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.564838 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8xz88"] Dec 03 08:42:27 crc kubenswrapper[4576]: W1203 08:42:27.597701 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod711bed39_85e0_418e_b746_659fef7c4b49.slice/crio-6e3fde5e909d96a066010cdb6d688ec044b6d4349deb69d7b79d5609d7eb5ab6 WatchSource:0}: Error finding container 6e3fde5e909d96a066010cdb6d688ec044b6d4349deb69d7b79d5609d7eb5ab6: Status 404 returned error can't find the container with id 6e3fde5e909d96a066010cdb6d688ec044b6d4349deb69d7b79d5609d7eb5ab6 Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.609152 4576 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lskhl" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.622808 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:27 crc kubenswrapper[4576]: E1203 08:42:27.623454 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:28.1230948 +0000 UTC m=+155.509071784 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.626747 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kbnz6" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.647046 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-sjsrz" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.683288 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9825w" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.781392 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-lcjhm" Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.782116 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:27 crc kubenswrapper[4576]: E1203 08:42:27.782369 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:28.282354188 +0000 UTC m=+155.668331172 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.801740 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-9xtqh" Dec 03 08:42:27 crc kubenswrapper[4576]: W1203 08:42:27.851020 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod598b95f5_9016_454b_be5b_4b9a4145479b.slice/crio-9603af87cf7d5fa00985f589be9d45ae4ac60f5e2aebe856651e37c96714ee06 WatchSource:0}: Error finding container 9603af87cf7d5fa00985f589be9d45ae4ac60f5e2aebe856651e37c96714ee06: Status 404 returned error can't find the container with id 9603af87cf7d5fa00985f589be9d45ae4ac60f5e2aebe856651e37c96714ee06 Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.883088 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:27 crc kubenswrapper[4576]: E1203 08:42:27.883663 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:28.383649719 +0000 UTC m=+155.769626703 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.983936 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.984161 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bedc7336-f16c-485b-8cc8-13eea705d68a-config\") pod \"machine-api-operator-5694c8668f-nsf2s\" (UID: \"bedc7336-f16c-485b-8cc8-13eea705d68a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nsf2s" Dec 03 08:42:27 crc kubenswrapper[4576]: E1203 08:42:27.984381 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-03 08:42:28.484354854 +0000 UTC m=+155.870331838 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:27 crc kubenswrapper[4576]: I1203 08:42:27.985010 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bedc7336-f16c-485b-8cc8-13eea705d68a-config\") pod \"machine-api-operator-5694c8668f-nsf2s\" (UID: \"bedc7336-f16c-485b-8cc8-13eea705d68a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nsf2s" Dec 03 08:42:28 crc kubenswrapper[4576]: I1203 08:42:28.086051 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:28 crc kubenswrapper[4576]: E1203 08:42:28.086504 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:28.586491687 +0000 UTC m=+155.972468671 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:28 crc kubenswrapper[4576]: I1203 08:42:28.198632 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:28 crc kubenswrapper[4576]: E1203 08:42:28.199321 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:28.699302438 +0000 UTC m=+156.085279422 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:28 crc kubenswrapper[4576]: I1203 08:42:28.199765 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-nsf2s" Dec 03 08:42:28 crc kubenswrapper[4576]: I1203 08:42:28.295458 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2v8gh"] Dec 03 08:42:28 crc kubenswrapper[4576]: I1203 08:42:28.303773 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:28 crc kubenswrapper[4576]: E1203 08:42:28.304190 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:28.804176995 +0000 UTC m=+156.190153979 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:28 crc kubenswrapper[4576]: I1203 08:42:28.407947 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:28 crc kubenswrapper[4576]: E1203 08:42:28.408291 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:28.90826084 +0000 UTC m=+156.294237824 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:28 crc kubenswrapper[4576]: W1203 08:42:28.487561 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod74ee01ca_ad15_4a8f_8c72_0daf093db217.slice/crio-18197008ca88532af9ab5589e7bdea6119bc0abb8f740e8f77c73f6c603ed937 WatchSource:0}: Error finding container 18197008ca88532af9ab5589e7bdea6119bc0abb8f740e8f77c73f6c603ed937: Status 404 returned error can't find the container with id 18197008ca88532af9ab5589e7bdea6119bc0abb8f740e8f77c73f6c603ed937 Dec 03 08:42:28 crc kubenswrapper[4576]: I1203 08:42:28.496343 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gnxs8" event={"ID":"c82788b8-bd1f-49c5-8f63-e8e360008f40","Type":"ContainerStarted","Data":"0b3dfe1ebcfde9f6d22758a71e03f9a0350d0ec55e47074169890faf0e427cb4"} Dec 03 08:42:28 crc kubenswrapper[4576]: I1203 08:42:28.496550 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gnxs8" Dec 03 08:42:28 crc kubenswrapper[4576]: I1203 08:42:28.498328 4576 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-gnxs8 container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" start-of-body= Dec 03 08:42:28 crc kubenswrapper[4576]: I1203 08:42:28.498394 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gnxs8" podUID="c82788b8-bd1f-49c5-8f63-e8e360008f40" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" Dec 03 08:42:28 crc kubenswrapper[4576]: I1203 08:42:28.509253 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:28 crc kubenswrapper[4576]: E1203 08:42:28.509618 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:29.009604312 +0000 UTC m=+156.395581296 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:28 crc kubenswrapper[4576]: I1203 08:42:28.510567 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" podStartSLOduration=133.510548078 podStartE2EDuration="2m13.510548078s" podCreationTimestamp="2025-12-03 08:40:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:28.476088852 +0000 UTC m=+155.862065836" watchObservedRunningTime="2025-12-03 08:42:28.510548078 +0000 UTC m=+155.896525062" Dec 03 08:42:28 crc kubenswrapper[4576]: I1203 08:42:28.553248 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5jnbz" event={"ID":"c212c1e9-bf50-4619-b4a0-7ae0659f7cab","Type":"ContainerStarted","Data":"a6cfc03b683af61a4e4b7d528cafd0939390b5c0c485f01f2a152118dda10791"} Dec 03 08:42:28 crc kubenswrapper[4576]: I1203 08:42:28.565148 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-qv4qp"] Dec 03 08:42:28 crc kubenswrapper[4576]: I1203 08:42:28.570191 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-6tzl7" event={"ID":"163752d1-e68c-43c0-b869-2d7755551a1d","Type":"ContainerStarted","Data":"3f96f81a6f08854714db91661f288e639f736d28bd74a2649dd657464fc4b358"} Dec 03 08:42:28 crc kubenswrapper[4576]: I1203 08:42:28.571855 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-l27nx" event={"ID":"f9d3808c-11ef-421d-83e6-b909679c5490","Type":"ContainerStarted","Data":"1e7bd6cdead587ce9d9f8c3ec33a9d7f192d991a622cd00d52319bbe2699634d"} Dec 03 08:42:28 crc kubenswrapper[4576]: I1203 08:42:28.606182 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-hhd5z" event={"ID":"5375d73e-a10f-4e11-a6a7-9ec01e8a60ea","Type":"ContainerStarted","Data":"771fb5ad4667b614c5f5808e5033a34754bbd0d8f1103a83100602d88bb3d021"} Dec 03 08:42:28 crc kubenswrapper[4576]: I1203 08:42:28.616104 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:28 crc kubenswrapper[4576]: E1203 08:42:28.616496 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:29.116477003 +0000 UTC m=+156.502453987 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:28 crc kubenswrapper[4576]: I1203 08:42:28.622322 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jqrv8" event={"ID":"711bed39-85e0-418e-b746-659fef7c4b49","Type":"ContainerStarted","Data":"6e3fde5e909d96a066010cdb6d688ec044b6d4349deb69d7b79d5609d7eb5ab6"} Dec 03 08:42:28 crc kubenswrapper[4576]: I1203 08:42:28.623573 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tjv2m" event={"ID":"598b95f5-9016-454b-be5b-4b9a4145479b","Type":"ContainerStarted","Data":"9603af87cf7d5fa00985f589be9d45ae4ac60f5e2aebe856651e37c96714ee06"} Dec 03 08:42:28 crc kubenswrapper[4576]: I1203 08:42:28.630341 4576 generic.go:334] "Generic (PLEG): container finished" podID="fe3569cb-a99a-4e1a-8b5f-124e68276330" containerID="952311baec0c6f6d6a04d270da1928ebac9c5a58075253066c69b95ec08f5a90" exitCode=0 Dec 03 08:42:28 crc kubenswrapper[4576]: I1203 08:42:28.631108 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-t7p55" event={"ID":"fe3569cb-a99a-4e1a-8b5f-124e68276330","Type":"ContainerDied","Data":"952311baec0c6f6d6a04d270da1928ebac9c5a58075253066c69b95ec08f5a90"} Dec 03 08:42:28 crc kubenswrapper[4576]: I1203 08:42:28.680393 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-6qg8m" event={"ID":"2e949c9f-48d1-4da5-90a2-21e446f64020","Type":"ContainerStarted","Data":"c0a67ed10b2b34f14c3dde9d00fbfed70b911f2f7df6eae698d8cfba63017226"} Dec 03 08:42:28 crc kubenswrapper[4576]: I1203 08:42:28.723725 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:28 crc kubenswrapper[4576]: E1203 08:42:28.729157 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:29.229140729 +0000 UTC m=+156.615117713 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:28 crc kubenswrapper[4576]: I1203 08:42:28.777160 4576 generic.go:334] "Generic (PLEG): container finished" podID="098beae0-8e26-45b7-bf18-cf6e4c83d7c7" containerID="d1b6c51ce3ef3f6b924dea4456752ca4f45193e0de93ed8d2428c82f2486d8a7" exitCode=0 Dec 03 08:42:28 crc kubenswrapper[4576]: I1203 08:42:28.777271 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-x5pdd" event={"ID":"098beae0-8e26-45b7-bf18-cf6e4c83d7c7","Type":"ContainerDied","Data":"d1b6c51ce3ef3f6b924dea4456752ca4f45193e0de93ed8d2428c82f2486d8a7"} Dec 03 08:42:28 crc kubenswrapper[4576]: I1203 08:42:28.822289 4576 generic.go:334] "Generic (PLEG): container finished" podID="7886546d-9341-4fcc-a861-d78d9fa35e98" containerID="e73050ba9baaf7b4cd17151ffa073c9a880565f238d98d0c87a7a7736b5dd9f9" exitCode=0 Dec 03 08:42:28 crc kubenswrapper[4576]: I1203 08:42:28.822669 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-74x2q" event={"ID":"7886546d-9341-4fcc-a861-d78d9fa35e98","Type":"ContainerDied","Data":"e73050ba9baaf7b4cd17151ffa073c9a880565f238d98d0c87a7a7736b5dd9f9"} Dec 03 08:42:28 crc kubenswrapper[4576]: I1203 08:42:28.824012 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:28 crc kubenswrapper[4576]: E1203 08:42:28.824515 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:29.32450133 +0000 UTC m=+156.710478314 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:28 crc kubenswrapper[4576]: I1203 08:42:28.865700 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8xz88" event={"ID":"4ddf4f4a-13ff-4d6d-82e8-4e509e1d293a","Type":"ContainerStarted","Data":"1ffd31d340fda603ede2d9860cdd5bbe106e595e67edb3c598af42b9846201e4"} Dec 03 08:42:28 crc kubenswrapper[4576]: I1203 08:42:28.916806 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-h7ncw" event={"ID":"3cb91673-2622-41a5-91d3-a79e0ba7289b","Type":"ContainerStarted","Data":"75ae8f891e53c283ae62ae63b91c952fabfd4271f51910d59023376d2c709cc4"} Dec 03 08:42:28 crc kubenswrapper[4576]: I1203 08:42:28.923579 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:42:28 crc kubenswrapper[4576]: I1203 08:42:28.925447 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:28 crc kubenswrapper[4576]: I1203 08:42:28.943811 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-km29k" Dec 03 08:42:28 crc kubenswrapper[4576]: I1203 08:42:28.945302 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-km29k" podStartSLOduration=133.945178471 podStartE2EDuration="2m13.945178471s" podCreationTimestamp="2025-12-03 08:40:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:28.918612188 +0000 UTC m=+156.304589172" watchObservedRunningTime="2025-12-03 08:42:28.945178471 +0000 UTC m=+156.331155455" Dec 03 08:42:28 crc kubenswrapper[4576]: E1203 08:42:28.947965 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:29.447943776 +0000 UTC m=+156.833920760 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:29 crc kubenswrapper[4576]: I1203 08:42:29.041166 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:29 crc kubenswrapper[4576]: E1203 08:42:29.043255 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:29.543237245 +0000 UTC m=+156.929214229 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:29 crc kubenswrapper[4576]: I1203 08:42:29.046193 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mrptv"] Dec 03 08:42:29 crc kubenswrapper[4576]: I1203 08:42:29.091964 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-9nt6z"] Dec 03 08:42:29 crc kubenswrapper[4576]: I1203 08:42:29.142293 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:29 crc kubenswrapper[4576]: E1203 08:42:29.142684 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:29.642672586 +0000 UTC m=+157.028649570 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:29 crc kubenswrapper[4576]: I1203 08:42:29.190319 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-brdzq"] Dec 03 08:42:29 crc kubenswrapper[4576]: I1203 08:42:29.246948 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xbbn2"] Dec 03 08:42:29 crc kubenswrapper[4576]: I1203 08:42:29.252353 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:29 crc kubenswrapper[4576]: E1203 08:42:29.252917 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:29.752895597 +0000 UTC m=+157.138872581 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:29 crc kubenswrapper[4576]: I1203 08:42:29.270261 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kbnz6"] Dec 03 08:42:29 crc kubenswrapper[4576]: I1203 08:42:29.354922 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:29 crc kubenswrapper[4576]: E1203 08:42:29.355411 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:29.85539789 +0000 UTC m=+157.241374874 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:29 crc kubenswrapper[4576]: I1203 08:42:29.455571 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:29 crc kubenswrapper[4576]: E1203 08:42:29.456033 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:29.956013413 +0000 UTC m=+157.341990397 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:29 crc kubenswrapper[4576]: W1203 08:42:29.475723 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod19be1a82_6279_466c_a719_e346d59597be.slice/crio-ef2e8b9b66900d453e2a3be3e7bb8725df997a7c7bd9f8d4447d10229b91a963 WatchSource:0}: Error finding container ef2e8b9b66900d453e2a3be3e7bb8725df997a7c7bd9f8d4447d10229b91a963: Status 404 returned error can't find the container with id ef2e8b9b66900d453e2a3be3e7bb8725df997a7c7bd9f8d4447d10229b91a963 Dec 03 08:42:29 crc kubenswrapper[4576]: W1203 08:42:29.490181 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0d41bec2_af11_4659_9864_a8f5684af5da.slice/crio-33e459a651e9b037355f5a1ad278e7c298cd16575c61b5dad2f05f467c161c30 WatchSource:0}: Error finding container 33e459a651e9b037355f5a1ad278e7c298cd16575c61b5dad2f05f467c161c30: Status 404 returned error can't find the container with id 33e459a651e9b037355f5a1ad278e7c298cd16575c61b5dad2f05f467c161c30 Dec 03 08:42:29 crc kubenswrapper[4576]: I1203 08:42:29.558106 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-hhd5z" podStartSLOduration=134.558084524 podStartE2EDuration="2m14.558084524s" podCreationTimestamp="2025-12-03 08:40:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:29.517305539 +0000 UTC m=+156.903282513" watchObservedRunningTime="2025-12-03 08:42:29.558084524 +0000 UTC m=+156.944061508" Dec 03 08:42:29 crc kubenswrapper[4576]: I1203 08:42:29.558488 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:29 crc kubenswrapper[4576]: E1203 08:42:29.558855 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:30.058839475 +0000 UTC m=+157.444816459 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:29 crc kubenswrapper[4576]: I1203 08:42:29.621202 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5jnbz" podStartSLOduration=134.621170079 podStartE2EDuration="2m14.621170079s" podCreationTimestamp="2025-12-03 08:40:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:29.618141668 +0000 UTC m=+157.004118652" watchObservedRunningTime="2025-12-03 08:42:29.621170079 +0000 UTC m=+157.007147063" Dec 03 08:42:29 crc kubenswrapper[4576]: I1203 08:42:29.635204 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-2fhp6"] Dec 03 08:42:29 crc kubenswrapper[4576]: I1203 08:42:29.665049 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:29 crc kubenswrapper[4576]: E1203 08:42:29.665543 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:30.16550679 +0000 UTC m=+157.551483764 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:29 crc kubenswrapper[4576]: I1203 08:42:29.728386 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-88mqt"] Dec 03 08:42:29 crc kubenswrapper[4576]: I1203 08:42:29.736659 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vbnmn"] Dec 03 08:42:29 crc kubenswrapper[4576]: I1203 08:42:29.776246 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gnxs8" podStartSLOduration=133.776226864 podStartE2EDuration="2m13.776226864s" podCreationTimestamp="2025-12-03 08:40:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:29.773306515 +0000 UTC m=+157.159283499" watchObservedRunningTime="2025-12-03 08:42:29.776226864 +0000 UTC m=+157.162203838" Dec 03 08:42:29 crc kubenswrapper[4576]: I1203 08:42:29.778921 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:29 crc kubenswrapper[4576]: E1203 08:42:29.779246 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:30.279233344 +0000 UTC m=+157.665210328 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:29 crc kubenswrapper[4576]: I1203 08:42:29.898489 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:29 crc kubenswrapper[4576]: E1203 08:42:29.899103 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:30.399085564 +0000 UTC m=+157.785062548 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.020692 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:30 crc kubenswrapper[4576]: E1203 08:42:30.021094 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:30.521080911 +0000 UTC m=+157.907057895 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.064179 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cwfc8"] Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.082894 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-88mqt" event={"ID":"c909961d-2992-451c-aa4e-4196e1936bbd","Type":"ContainerStarted","Data":"910a6e2d757f3e16a58b78ed0a0198005898d3f979194a027f2bbf558fd8fce3"} Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.084159 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-brdzq" event={"ID":"dc0c496c-fa01-4d20-8bb1-0e0a5f0dda44","Type":"ContainerStarted","Data":"5ca1a6123da2834547665410e2ce0020b22309f073e74a8441d5ab02da4af18d"} Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.084983 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kbnz6" event={"ID":"19be1a82-6279-466c-a719-e346d59597be","Type":"ContainerStarted","Data":"ef2e8b9b66900d453e2a3be3e7bb8725df997a7c7bd9f8d4447d10229b91a963"} Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.085920 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2fhp6" event={"ID":"84029005-e8f5-4697-9fe2-4057607adb5e","Type":"ContainerStarted","Data":"077389a78fa9c05b8998a5dca671463baeb6c0f084deb8293e65f7c68a67ad00"} Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.087206 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8xz88" 
event={"ID":"4ddf4f4a-13ff-4d6d-82e8-4e509e1d293a","Type":"ContainerStarted","Data":"dffb2a6461d2e6057b80ae3c962a4138be0eb9748fc508fabe37f40426ee29d5"} Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.130135 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:30 crc kubenswrapper[4576]: E1203 08:42:30.130550 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:30.63051701 +0000 UTC m=+158.016493994 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.148727 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8xz88" podStartSLOduration=135.148710069 podStartE2EDuration="2m15.148710069s" podCreationTimestamp="2025-12-03 08:40:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:30.146492569 +0000 UTC m=+157.532469553" watchObservedRunningTime="2025-12-03 08:42:30.148710069 +0000 UTC m=+157.534687053" Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.152973 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vbnmn" event={"ID":"dfad5e53-c42f-4e6f-8c41-2d1b7b668e29","Type":"ContainerStarted","Data":"c7ec8ab4d6c2e1820b0f42f58e50061003bb7349e8e88b8cd3be7b2e762a3ae1"} Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.226884 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mrptv" event={"ID":"2541618c-f550-48ef-9316-77a5dd7f1084","Type":"ContainerStarted","Data":"426c76b3f96473763a418bf6e86163a724de52074a4840d627efdaa4b72dcf16"} Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.231416 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:30 crc kubenswrapper[4576]: E1203 08:42:30.232736 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:30.732724515 +0000 UTC m=+158.118701499 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.234453 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-lcjhm"] Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.332843 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:30 crc kubenswrapper[4576]: E1203 08:42:30.333257 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:30.833241525 +0000 UTC m=+158.219218509 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.370723 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-qv4qp" event={"ID":"5191b1fc-36b6-4fae-909b-3d721b39dd49","Type":"ContainerStarted","Data":"4adc4f464427971ff273f3c31f81479a4148199dd5f69558f9f881ff7c71000d"} Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.382913 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-22vzn" event={"ID":"c57d4112-6506-4f4c-86c2-c6d1249df640","Type":"ContainerStarted","Data":"5143b4a509b0ee498642c46398bed4ae1bec75eb517d9bf117f560d88cdfcf9c"} Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.418242 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-h7ncw" event={"ID":"3cb91673-2622-41a5-91d3-a79e0ba7289b","Type":"ContainerStarted","Data":"2245521e156360c88093e0e4899ab1ee8120afe1cdc928a6d30805ef3607d9fc"} Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.433609 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-22vzn" podStartSLOduration=135.43359129 podStartE2EDuration="2m15.43359129s" podCreationTimestamp="2025-12-03 08:40:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:30.431175136 +0000 UTC m=+157.817152120" watchObservedRunningTime="2025-12-03 08:42:30.43359129 +0000 UTC m=+157.819568274" Dec 03 08:42:30 crc kubenswrapper[4576]: 
I1203 08:42:30.434291 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:30 crc kubenswrapper[4576]: E1203 08:42:30.434636 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:30.934623408 +0000 UTC m=+158.320600392 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.447177 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-9nt6z" event={"ID":"974495e5-ca1d-4c68-9184-244d7c08276b","Type":"ContainerStarted","Data":"e4857ae478ff3a6705f3f08b3b52473dc02663db14a3a16308e82390188b790d"} Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.459195 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-28lz2"] Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.516186 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tjv2m" event={"ID":"598b95f5-9016-454b-be5b-4b9a4145479b","Type":"ContainerStarted","Data":"32e06873c45ba4bfe0dfb3f3b4fdb0e68ed5c5eea5cee69f5b3f609828db7ab4"} Dec 03 08:42:30 crc kubenswrapper[4576]: W1203 08:42:30.532072 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod64658394_9b30_424c_b243_e997781796e1.slice/crio-f8d47120d99a9c101c325cc03ca36b007722e360bc370be282eb26ae0b5a1c27 WatchSource:0}: Error finding container f8d47120d99a9c101c325cc03ca36b007722e360bc370be282eb26ae0b5a1c27: Status 404 returned error can't find the container with id f8d47120d99a9c101c325cc03ca36b007722e360bc370be282eb26ae0b5a1c27 Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.532271 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2v8gh" event={"ID":"99a12dc3-7eee-4751-9223-8f8d8098d045","Type":"ContainerStarted","Data":"f0cbd79d27722268c5e124c162930a9be98bff5b467c12c8772c4e76f9a29235"} Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.535369 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:30 crc kubenswrapper[4576]: E1203 08:42:30.536475 4576 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:31.036446983 +0000 UTC m=+158.422423967 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.560198 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-glrgq" event={"ID":"74ee01ca-ad15-4a8f-8c72-0daf093db217","Type":"ContainerStarted","Data":"18197008ca88532af9ab5589e7bdea6119bc0abb8f740e8f77c73f6c603ed937"} Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.568682 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jqrv8" event={"ID":"711bed39-85e0-418e-b746-659fef7c4b49","Type":"ContainerStarted","Data":"031675b835bed26a89240202df9b7a0e3b4f49f7d2fb0c6260d765a46a26e6c8"} Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.589846 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-q56k8" event={"ID":"9444d9cb-fe03-4f32-ad40-84901ecfb0d9","Type":"ContainerStarted","Data":"a87ba4d41ff3486ccca6d43467e92aa20a5b4a665dbdb804ee016c0f23155da1"} Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.606251 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-h7ncw" podStartSLOduration=135.606231778 podStartE2EDuration="2m15.606231778s" podCreationTimestamp="2025-12-03 08:40:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:30.491810254 +0000 UTC m=+157.877787238" watchObservedRunningTime="2025-12-03 08:42:30.606231778 +0000 UTC m=+157.992208762" Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.608763 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-8rpwp"] Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.608791 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-l27nx" event={"ID":"f9d3808c-11ef-421d-83e6-b909679c5490","Type":"ContainerStarted","Data":"d5aa7b0f14591d541544d125355c26efbf5ed950d2c86096fbbf0b32dc5e8bb1"} Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.608812 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-l27nx" Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.609006 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tjv2m" podStartSLOduration=135.608996512 podStartE2EDuration="2m15.608996512s" podCreationTimestamp="2025-12-03 08:40:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:30.605086397 +0000 UTC 
m=+157.991063381" watchObservedRunningTime="2025-12-03 08:42:30.608996512 +0000 UTC m=+157.994973496" Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.640744 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-v4pcv"] Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.642634 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9825w"] Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.642723 4576 patch_prober.go:28] interesting pod/downloads-7954f5f757-l27nx container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.642785 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-l27nx" podUID="f9d3808c-11ef-421d-83e6-b909679c5490" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.644228 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:30 crc kubenswrapper[4576]: E1203 08:42:30.644538 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:31.144507166 +0000 UTC m=+158.530484150 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.647140 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412510-jwjq6"] Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.666078 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xbbn2" event={"ID":"0d41bec2-af11-4659-9864-a8f5684af5da","Type":"ContainerStarted","Data":"33e459a651e9b037355f5a1ad278e7c298cd16575c61b5dad2f05f467c161c30"} Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.675805 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-6svzg" event={"ID":"fa35292d-7cdf-4bb9-8fb0-82296792a332","Type":"ContainerStarted","Data":"923dfe8227a322679c87e5d9ca7fafa06e83fb7ceaef0a7cd82e1c1c2129ede7"} Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.714473 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gnxs8" Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.717546 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-l27nx" podStartSLOduration=135.717507116 podStartE2EDuration="2m15.717507116s" podCreationTimestamp="2025-12-03 08:40:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:30.715085042 +0000 UTC m=+158.101062026" watchObservedRunningTime="2025-12-03 08:42:30.717507116 +0000 UTC m=+158.103484090" Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.717680 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-q56k8" podStartSLOduration=135.717672821 podStartE2EDuration="2m15.717672821s" podCreationTimestamp="2025-12-03 08:40:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:30.667211895 +0000 UTC m=+158.053188899" watchObservedRunningTime="2025-12-03 08:42:30.717672821 +0000 UTC m=+158.103649805" Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.745223 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:30 crc kubenswrapper[4576]: E1203 08:42:30.746444 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-03 08:42:31.246427493 +0000 UTC m=+158.632404477 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.771705 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lskhl"] Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.848129 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:30 crc kubenswrapper[4576]: E1203 08:42:30.848539 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:31.348511845 +0000 UTC m=+158.734488819 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:30 crc kubenswrapper[4576]: I1203 08:42:30.949828 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:30 crc kubenswrapper[4576]: E1203 08:42:30.950185 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:31.450169645 +0000 UTC m=+158.836146629 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:31 crc kubenswrapper[4576]: I1203 08:42:31.039049 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-p4j4g"] Dec 03 08:42:31 crc kubenswrapper[4576]: I1203 08:42:31.074774 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:31 crc kubenswrapper[4576]: E1203 08:42:31.075969 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:31.575950304 +0000 UTC m=+158.961927288 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:31 crc kubenswrapper[4576]: I1203 08:42:31.078493 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-nsf2s"] Dec 03 08:42:31 crc kubenswrapper[4576]: I1203 08:42:31.105830 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-9xtqh"] Dec 03 08:42:31 crc kubenswrapper[4576]: I1203 08:42:31.177199 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:31 crc kubenswrapper[4576]: E1203 08:42:31.177592 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:31.677573644 +0000 UTC m=+159.063550628 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:31 crc kubenswrapper[4576]: I1203 08:42:31.255898 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-sjsrz"] Dec 03 08:42:31 crc kubenswrapper[4576]: I1203 08:42:31.280241 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:31 crc kubenswrapper[4576]: E1203 08:42:31.280725 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:31.780706654 +0000 UTC m=+159.166683638 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:31 crc kubenswrapper[4576]: W1203 08:42:31.376810 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85719c9_01af_498b_ba00_7a43b7b923e3.slice/crio-73a01ad3c8b0286425423299cff1a20af5694cc039076ac44294b8164bbf7054 WatchSource:0}: Error finding container 73a01ad3c8b0286425423299cff1a20af5694cc039076ac44294b8164bbf7054: Status 404 returned error can't find the container with id 73a01ad3c8b0286425423299cff1a20af5694cc039076ac44294b8164bbf7054 Dec 03 08:42:31 crc kubenswrapper[4576]: I1203 08:42:31.381039 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:31 crc kubenswrapper[4576]: E1203 08:42:31.381479 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:31.881463389 +0000 UTC m=+159.267440363 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:31 crc kubenswrapper[4576]: I1203 08:42:31.487340 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:31 crc kubenswrapper[4576]: E1203 08:42:31.487733 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:31.987721344 +0000 UTC m=+159.373698328 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:31 crc kubenswrapper[4576]: I1203 08:42:31.590703 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:31 crc kubenswrapper[4576]: E1203 08:42:31.591384 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:32.091367508 +0000 UTC m=+159.477344492 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:31 crc kubenswrapper[4576]: I1203 08:42:31.693710 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:31 crc kubenswrapper[4576]: E1203 08:42:31.694097 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:32.194085387 +0000 UTC m=+159.580062371 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:31 crc kubenswrapper[4576]: I1203 08:42:31.741869 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-p4j4g" event={"ID":"2cb8c604-b141-47b0-af3e-edf6ac4cd04c","Type":"ContainerStarted","Data":"f259dd6ecaf455f93b77c38cfb336611432d063f05b61c329476216c03a23be5"} Dec 03 08:42:31 crc kubenswrapper[4576]: I1203 08:42:31.761744 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-9xtqh" event={"ID":"825049cb-3413-4d9b-81bb-7210dc51a05f","Type":"ContainerStarted","Data":"8c13b3795deae43482ede37f444bde29121fac9f6c78b40b369699b44077a46a"} Dec 03 08:42:31 crc kubenswrapper[4576]: I1203 08:42:31.795069 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:31 crc kubenswrapper[4576]: E1203 08:42:31.796066 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:32.296048176 +0000 UTC m=+159.682025160 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:31 crc kubenswrapper[4576]: I1203 08:42:31.803693 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jqrv8" event={"ID":"711bed39-85e0-418e-b746-659fef7c4b49","Type":"ContainerStarted","Data":"05764389bb72b5f91605d8f4ab93150d1cd0b5e15983563f9bf3006b4b8ea4ed"} Dec 03 08:42:31 crc kubenswrapper[4576]: I1203 08:42:31.818599 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-x5pdd" event={"ID":"098beae0-8e26-45b7-bf18-cf6e4c83d7c7","Type":"ContainerStarted","Data":"20dd1fcef313e10404875510f0b79e92e466a566512eb5d73b4f9da3841938a4"} Dec 03 08:42:31 crc kubenswrapper[4576]: I1203 08:42:31.819276 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-x5pdd" Dec 03 08:42:31 crc kubenswrapper[4576]: I1203 08:42:31.860445 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9825w" event={"ID":"b3bdb0c8-1a8b-49fe-af23-0704abbeb3c2","Type":"ContainerStarted","Data":"e6b1158ad759f8e00f6cb0e09e7da8a5b78683f0cf69ce9e6b8f47cbd76c05c4"} Dec 03 08:42:31 crc kubenswrapper[4576]: I1203 08:42:31.884866 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-9nt6z" event={"ID":"974495e5-ca1d-4c68-9184-244d7c08276b","Type":"ContainerStarted","Data":"33e2f53cb3dfa40a7613b5e700206841743b2f8c46bc5d0e4697e6bfbb037fd2"} Dec 03 08:42:31 crc kubenswrapper[4576]: I1203 08:42:31.894198 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-lcjhm" event={"ID":"7b548daf-86aa-4fb1-86a8-3064794876c1","Type":"ContainerStarted","Data":"c2d4329dcc01f88c610d8be5686ddc1f90aec34f4831044312c0e8eea40de2b6"} Dec 03 08:42:31 crc kubenswrapper[4576]: I1203 08:42:31.899779 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:31 crc kubenswrapper[4576]: E1203 08:42:31.902139 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:32.402123105 +0000 UTC m=+159.788100089 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:31 crc kubenswrapper[4576]: I1203 08:42:31.933404 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-t7p55" event={"ID":"fe3569cb-a99a-4e1a-8b5f-124e68276330","Type":"ContainerStarted","Data":"90f3299475c6058b6cb3c13b0ab97c2456e2e5f33920f862d4ddf24b0a78e192"} Dec 03 08:42:31 crc kubenswrapper[4576]: I1203 08:42:31.942655 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412510-jwjq6" event={"ID":"70305d8f-8bdf-437b-ba4d-aaadc408205d","Type":"ContainerStarted","Data":"c0f9efeb14c4f6705b47d478fb0ebecd1dfdfe68ede752e91c389c27597773f4"} Dec 03 08:42:31 crc kubenswrapper[4576]: I1203 08:42:31.944811 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-glrgq" event={"ID":"74ee01ca-ad15-4a8f-8c72-0daf093db217","Type":"ContainerStarted","Data":"03977c10a3d74883e11196cd1b102d0874d2c37e5fba23fc622f17f224cc48d9"} Dec 03 08:42:31 crc kubenswrapper[4576]: I1203 08:42:31.958377 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jqrv8" podStartSLOduration=136.958357615 podStartE2EDuration="2m16.958357615s" podCreationTimestamp="2025-12-03 08:40:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:31.957902413 +0000 UTC m=+159.343879397" watchObservedRunningTime="2025-12-03 08:42:31.958357615 +0000 UTC m=+159.344334599" Dec 03 08:42:31 crc kubenswrapper[4576]: I1203 08:42:31.966925 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-28lz2" event={"ID":"f5ebf9cc-8827-431f-9a1b-f6c755a4a460","Type":"ContainerStarted","Data":"472a2e9216549e9a3e83ce5ccef72f5d766720afb2ec618e8a11394cc8dba11d"} Dec 03 08:42:31 crc kubenswrapper[4576]: I1203 08:42:31.973399 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-8rpwp" event={"ID":"154a9dc5-f312-436b-958f-c78ac4ec740c","Type":"ContainerStarted","Data":"50963679cb3af91ec1128ccf57a1c804c6b8ea7ebd1942765243f76727bdc635"} Dec 03 08:42:31 crc kubenswrapper[4576]: I1203 08:42:31.976502 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-sjsrz" event={"ID":"f85719c9-01af-498b-ba00-7a43b7b923e3","Type":"ContainerStarted","Data":"73a01ad3c8b0286425423299cff1a20af5694cc039076ac44294b8164bbf7054"} Dec 03 08:42:31 crc kubenswrapper[4576]: I1203 08:42:31.994811 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2v8gh" event={"ID":"99a12dc3-7eee-4751-9223-8f8d8098d045","Type":"ContainerStarted","Data":"bb27831ca314e0f8bb397e95591c7932b7c09b7b4ab011841ee057ecc5db9ce5"} Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.001942 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:32 crc kubenswrapper[4576]: E1203 08:42:32.002986 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:32.502971093 +0000 UTC m=+159.888948077 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.023735 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-74x2q" event={"ID":"7886546d-9341-4fcc-a861-d78d9fa35e98","Type":"ContainerStarted","Data":"4315811ebaa0e45d2cc08f6589fc324599aa561b9077dc6edf9f481b1ce547e6"} Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.106249 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.108036 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-brdzq" event={"ID":"dc0c496c-fa01-4d20-8bb1-0e0a5f0dda44","Type":"ContainerStarted","Data":"a6c03f396d5b07ca56ddce3cd6da4912ebb0c0a0f6738ca130b0efbaf7a908f4"} Dec 03 08:42:32 crc kubenswrapper[4576]: E1203 08:42:32.108172 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:32.608158549 +0000 UTC m=+159.994135533 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.124297 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kbnz6" event={"ID":"19be1a82-6279-466c-a719-e346d59597be","Type":"ContainerStarted","Data":"f58f02073fb6eb3a286b792bd6c2c109cf1eeedcf0c8f51a7cdc23f22768ce40"} Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.152938 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-x5pdd" podStartSLOduration=137.152917471 podStartE2EDuration="2m17.152917471s" podCreationTimestamp="2025-12-03 08:40:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:32.090207857 +0000 UTC m=+159.476184841" watchObservedRunningTime="2025-12-03 08:42:32.152917471 +0000 UTC m=+159.538894455" Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.154344 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-glrgq" podStartSLOduration=137.154323009 podStartE2EDuration="2m17.154323009s" podCreationTimestamp="2025-12-03 08:40:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:32.152723266 +0000 UTC m=+159.538700240" watchObservedRunningTime="2025-12-03 08:42:32.154323009 +0000 UTC m=+159.540315143" Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.161881 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-6qg8m" event={"ID":"2e949c9f-48d1-4da5-90a2-21e446f64020","Type":"ContainerStarted","Data":"aa5767f4c001f61c06d3464ae6ee8254ffc43169d5227a19413589df7a8023c6"} Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.192161 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xbbn2" event={"ID":"0d41bec2-af11-4659-9864-a8f5684af5da","Type":"ContainerStarted","Data":"39445ed375c5bb8a2a29fe2a9b31f86cdb110a398f863b623583421c84a91ede"} Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.199075 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-9nt6z" podStartSLOduration=137.19905624 podStartE2EDuration="2m17.19905624s" podCreationTimestamp="2025-12-03 08:40:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:32.198125166 +0000 UTC m=+159.584102160" watchObservedRunningTime="2025-12-03 08:42:32.19905624 +0000 UTC m=+159.585033224" Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.207296 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:32 crc kubenswrapper[4576]: E1203 08:42:32.207769 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:32.707741184 +0000 UTC m=+160.093718158 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.207841 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:32 crc kubenswrapper[4576]: E1203 08:42:32.208180 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:32.708171706 +0000 UTC m=+160.094148690 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.247814 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-22vzn" event={"ID":"c57d4112-6506-4f4c-86c2-c6d1249df640","Type":"ContainerStarted","Data":"8cd92f6fae304618b7dd5ea628c8b97c0efce49ab488b674fb5fadaa59d2247a"} Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.284165 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2v8gh" podStartSLOduration=137.284143316 podStartE2EDuration="2m17.284143316s" podCreationTimestamp="2025-12-03 08:40:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:32.282572824 +0000 UTC m=+159.668549808" watchObservedRunningTime="2025-12-03 08:42:32.284143316 +0000 UTC m=+159.670120300" Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.306862 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cwfc8" event={"ID":"64658394-9b30-424c-b243-e997781796e1","Type":"ContainerStarted","Data":"f8d47120d99a9c101c325cc03ca36b007722e360bc370be282eb26ae0b5a1c27"} Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.308407 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:32 crc kubenswrapper[4576]: E1203 08:42:32.311657 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:32.811635165 +0000 UTC m=+160.197612149 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.408459 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-6tzl7" event={"ID":"163752d1-e68c-43c0-b869-2d7755551a1d","Type":"ContainerStarted","Data":"a600476e9556946323073169e64aab45db09fddeb25d1cdac748aab011ee692c"} Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.410609 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-glrgq" Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.416718 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:32 crc kubenswrapper[4576]: E1203 08:42:32.431072 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:32.931054572 +0000 UTC m=+160.317031556 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.434969 4576 patch_prober.go:28] interesting pod/router-default-5444994796-glrgq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 08:42:32 crc kubenswrapper[4576]: [-]has-synced failed: reason withheld Dec 03 08:42:32 crc kubenswrapper[4576]: [+]process-running ok Dec 03 08:42:32 crc kubenswrapper[4576]: healthz check failed Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.435031 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-glrgq" podUID="74ee01ca-ad15-4a8f-8c72-0daf093db217" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.454926 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-nsf2s" event={"ID":"bedc7336-f16c-485b-8cc8-13eea705d68a","Type":"ContainerStarted","Data":"3cfea595428f3f59efcfb125d3d42f2385275c2f9b1bc27f7f5f7bee8b495214"} Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.472296 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-6svzg" event={"ID":"fa35292d-7cdf-4bb9-8fb0-82296792a332","Type":"ContainerStarted","Data":"1561c7962f5cda65140be1d13f436d9cd4b0c2681b6102dd60cc3cda115629ca"} Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.530055 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:32 crc kubenswrapper[4576]: E1203 08:42:32.531238 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:33.031222214 +0000 UTC m=+160.417199198 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.537522 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mrptv" event={"ID":"2541618c-f550-48ef-9316-77a5dd7f1084","Type":"ContainerStarted","Data":"4b1273cfc28de5af495b51f60a1cc5facbdcb34b68b97741f59f227322037917"} Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.538471 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-mrptv" Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.546672 4576 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-mrptv container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.27:8080/healthz\": dial tcp 10.217.0.27:8080: connect: connection refused" start-of-body= Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.546739 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-mrptv" podUID="2541618c-f550-48ef-9316-77a5dd7f1084" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.27:8080/healthz\": dial tcp 10.217.0.27:8080: connect: connection refused" Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.601130 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-6qg8m" podStartSLOduration=137.60110846 podStartE2EDuration="2m17.60110846s" podCreationTimestamp="2025-12-03 08:40:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:32.579661173 +0000 UTC m=+159.965638147" watchObservedRunningTime="2025-12-03 08:42:32.60110846 +0000 UTC m=+159.987085444" Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.601447 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kbnz6" podStartSLOduration=136.601441909 podStartE2EDuration="2m16.601441909s" podCreationTimestamp="2025-12-03 08:40:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:32.409264757 +0000 UTC m=+159.795241751" watchObservedRunningTime="2025-12-03 08:42:32.601441909 +0000 UTC m=+159.987418893" Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.637898 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:32 crc kubenswrapper[4576]: E1203 08:42:32.639245 4576 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:33.139224513 +0000 UTC m=+160.525201497 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.661344 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-qv4qp" event={"ID":"5191b1fc-36b6-4fae-909b-3d721b39dd49","Type":"ContainerStarted","Data":"3f542e3609122910a8c5442d1295486bfd8b44a1f68ea23764165958a932e78c"} Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.664317 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-qv4qp" Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.665463 4576 patch_prober.go:28] interesting pod/console-operator-58897d9998-qv4qp container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.28:8443/readyz\": dial tcp 10.217.0.28:8443: connect: connection refused" start-of-body= Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.665536 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-qv4qp" podUID="5191b1fc-36b6-4fae-909b-3d721b39dd49" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.28:8443/readyz\": dial tcp 10.217.0.28:8443: connect: connection refused" Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.668314 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-74x2q" podStartSLOduration=136.668119949 podStartE2EDuration="2m16.668119949s" podCreationTimestamp="2025-12-03 08:40:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:32.664958414 +0000 UTC m=+160.050935398" watchObservedRunningTime="2025-12-03 08:42:32.668119949 +0000 UTC m=+160.054096933" Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.704443 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lskhl" event={"ID":"e82ee5bd-9dd0-4d09-a629-21a1d08dbb06","Type":"ContainerStarted","Data":"b5a0c0a8528717cbec299a6829a37ca5e1179c3c2f70dd90142d4a9cdb689325"} Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.743642 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:32 crc kubenswrapper[4576]: E1203 08:42:32.744698 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:33.244683176 +0000 UTC m=+160.630660160 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.764796 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-v4pcv" event={"ID":"b09df583-33ba-444d-aa5e-62158b7a3f80","Type":"ContainerStarted","Data":"5c56d3daa4d47dbe9fdd134a47fa7a4d7087b40f68045e3028c82bc795815103"} Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.783880 4576 patch_prober.go:28] interesting pod/downloads-7954f5f757-l27nx container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.783968 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-l27nx" podUID="f9d3808c-11ef-421d-83e6-b909679c5490" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.786047 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-qv4qp" podStartSLOduration=137.786036147 podStartE2EDuration="2m17.786036147s" podCreationTimestamp="2025-12-03 08:40:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:32.784352281 +0000 UTC m=+160.170329255" watchObservedRunningTime="2025-12-03 08:42:32.786036147 +0000 UTC m=+160.172013131" Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.831038 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-6tzl7" podStartSLOduration=137.831017045 podStartE2EDuration="2m17.831017045s" podCreationTimestamp="2025-12-03 08:40:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:32.83009773 +0000 UTC m=+160.216074714" watchObservedRunningTime="2025-12-03 08:42:32.831017045 +0000 UTC m=+160.216994029" Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.846335 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:32 crc kubenswrapper[4576]: E1203 08:42:32.853258 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" 
failed. No retries permitted until 2025-12-03 08:42:33.353242802 +0000 UTC m=+160.739219786 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.895953 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-6svzg" podStartSLOduration=8.895927058 podStartE2EDuration="8.895927058s" podCreationTimestamp="2025-12-03 08:42:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:32.892266489 +0000 UTC m=+160.278243483" watchObservedRunningTime="2025-12-03 08:42:32.895927058 +0000 UTC m=+160.281904052" Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.947632 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:32 crc kubenswrapper[4576]: E1203 08:42:32.948111 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:33.448093919 +0000 UTC m=+160.834070903 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.948608 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-mrptv" podStartSLOduration=136.948586583 podStartE2EDuration="2m16.948586583s" podCreationTimestamp="2025-12-03 08:40:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:32.945809648 +0000 UTC m=+160.331786632" watchObservedRunningTime="2025-12-03 08:42:32.948586583 +0000 UTC m=+160.334563567" Dec 03 08:42:32 crc kubenswrapper[4576]: I1203 08:42:32.989570 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cwfc8" podStartSLOduration=137.989548363 podStartE2EDuration="2m17.989548363s" podCreationTimestamp="2025-12-03 08:40:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:32.976334638 +0000 UTC m=+160.362311642" watchObservedRunningTime="2025-12-03 08:42:32.989548363 +0000 UTC m=+160.375525347" Dec 03 08:42:33 crc kubenswrapper[4576]: I1203 08:42:33.048822 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:33 crc kubenswrapper[4576]: E1203 08:42:33.049804 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:33.549781501 +0000 UTC m=+160.935758485 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:33 crc kubenswrapper[4576]: I1203 08:42:33.150207 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:33 crc kubenswrapper[4576]: E1203 08:42:33.153034 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:33.652982683 +0000 UTC m=+161.038959697 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:33 crc kubenswrapper[4576]: I1203 08:42:33.252363 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:33 crc kubenswrapper[4576]: E1203 08:42:33.253226 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:33.753192444 +0000 UTC m=+161.139169418 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:33 crc kubenswrapper[4576]: I1203 08:42:33.355126 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:33 crc kubenswrapper[4576]: E1203 08:42:33.355735 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:33.855697327 +0000 UTC m=+161.241674311 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:33 crc kubenswrapper[4576]: I1203 08:42:33.434814 4576 patch_prober.go:28] interesting pod/router-default-5444994796-glrgq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 08:42:33 crc kubenswrapper[4576]: [-]has-synced failed: reason withheld Dec 03 08:42:33 crc kubenswrapper[4576]: [+]process-running ok Dec 03 08:42:33 crc kubenswrapper[4576]: healthz check failed Dec 03 08:42:33 crc kubenswrapper[4576]: I1203 08:42:33.434949 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-glrgq" podUID="74ee01ca-ad15-4a8f-8c72-0daf093db217" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 08:42:33 crc kubenswrapper[4576]: I1203 08:42:33.457012 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:33 crc kubenswrapper[4576]: E1203 08:42:33.458168 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:33.95815365 +0000 UTC m=+161.344130634 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:33 crc kubenswrapper[4576]: I1203 08:42:33.618967 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:33 crc kubenswrapper[4576]: E1203 08:42:33.619412 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:34.119396251 +0000 UTC m=+161.505373235 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:33 crc kubenswrapper[4576]: I1203 08:42:33.722997 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:33 crc kubenswrapper[4576]: E1203 08:42:33.724569 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:34.224498783 +0000 UTC m=+161.610475767 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:33 crc kubenswrapper[4576]: I1203 08:42:33.750543 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lskhl" podStartSLOduration=138.750472221 podStartE2EDuration="2m18.750472221s" podCreationTimestamp="2025-12-03 08:40:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:33.046075661 +0000 UTC m=+160.432052645" watchObservedRunningTime="2025-12-03 08:42:33.750472221 +0000 UTC m=+161.136449225" Dec 03 08:42:33 crc kubenswrapper[4576]: I1203 08:42:33.809903 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vbnmn" event={"ID":"dfad5e53-c42f-4e6f-8c41-2d1b7b668e29","Type":"ContainerStarted","Data":"730d5ddb82356cc105b46ffe11adc9061707fc32ce0a8ab0e57e4a7708ec88d3"} Dec 03 08:42:33 crc kubenswrapper[4576]: I1203 08:42:33.811823 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vbnmn" Dec 03 08:42:33 crc kubenswrapper[4576]: I1203 08:42:33.820546 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2fhp6" event={"ID":"84029005-e8f5-4697-9fe2-4057607adb5e","Type":"ContainerStarted","Data":"058f72c5c12f8ed769082376bf896984ef4cdf1f52cfdfe3ff098b49e4469da8"} Dec 03 08:42:33 crc kubenswrapper[4576]: I1203 08:42:33.820587 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2fhp6" event={"ID":"84029005-e8f5-4697-9fe2-4057607adb5e","Type":"ContainerStarted","Data":"352d61a4775a4f1cef1aceb74c94fb6344e39fb1f81e7d3fd96745db780e7897"} Dec 03 08:42:33 crc kubenswrapper[4576]: I1203 08:42:33.826071 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:33 crc kubenswrapper[4576]: E1203 08:42:33.826383 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:34.32636844 +0000 UTC m=+161.712345424 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:33 crc kubenswrapper[4576]: I1203 08:42:33.835948 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9825w" event={"ID":"b3bdb0c8-1a8b-49fe-af23-0704abbeb3c2","Type":"ContainerStarted","Data":"c4f3541b9200cc1bd57de5bddb15263e87e92ee5556ef6629f22f860470dc2e6"} Dec 03 08:42:33 crc kubenswrapper[4576]: I1203 08:42:33.836452 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9825w" Dec 03 08:42:33 crc kubenswrapper[4576]: I1203 08:42:33.850797 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-p4j4g" event={"ID":"2cb8c604-b141-47b0-af3e-edf6ac4cd04c","Type":"ContainerStarted","Data":"fa630dcf98ac65ae8f4c8921990f7705b4304138b0f8b83435b5469c40e9d6ca"} Dec 03 08:42:33 crc kubenswrapper[4576]: I1203 08:42:33.914276 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-9xtqh" event={"ID":"825049cb-3413-4d9b-81bb-7210dc51a05f","Type":"ContainerStarted","Data":"20861a71402ab5ae4f284014c68f64c1f1d8006c29f1d28bc62eac4f9cd3688e"} Dec 03 08:42:33 crc kubenswrapper[4576]: I1203 08:42:33.927075 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:33 crc kubenswrapper[4576]: E1203 08:42:33.928449 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:34.428433871 +0000 UTC m=+161.814410865 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:33 crc kubenswrapper[4576]: I1203 08:42:33.949090 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-brdzq" event={"ID":"dc0c496c-fa01-4d20-8bb1-0e0a5f0dda44","Type":"ContainerStarted","Data":"cbbc4ad0f017bf1c6ae273ad2691fd5deaea0300672753b7b72602774869e157"} Dec 03 08:42:33 crc kubenswrapper[4576]: I1203 08:42:33.950263 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-brdzq" Dec 03 08:42:33 crc kubenswrapper[4576]: I1203 08:42:33.959194 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xbbn2" event={"ID":"0d41bec2-af11-4659-9864-a8f5684af5da","Type":"ContainerStarted","Data":"89b2d6da460bba54ad21be5330c8a10a9646abe5b779a8bb8a35f242660f1f73"} Dec 03 08:42:33 crc kubenswrapper[4576]: I1203 08:42:33.959757 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xbbn2" Dec 03 08:42:33 crc kubenswrapper[4576]: I1203 08:42:33.971255 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-28lz2" event={"ID":"f5ebf9cc-8827-431f-9a1b-f6c755a4a460","Type":"ContainerStarted","Data":"31da89ff23d7d80a4633f740bf2c71b4b47cd2c211f13a5ef4368ee8f55f75ee"} Dec 03 08:42:33 crc kubenswrapper[4576]: I1203 08:42:33.982885 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-88mqt" event={"ID":"c909961d-2992-451c-aa4e-4196e1936bbd","Type":"ContainerStarted","Data":"e4c1f63e2bd3f0e3954814e48b3305b0e03958f13cc01501f558dab40c2bf899"} Dec 03 08:42:33 crc kubenswrapper[4576]: I1203 08:42:33.982927 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-88mqt" event={"ID":"c909961d-2992-451c-aa4e-4196e1936bbd","Type":"ContainerStarted","Data":"34b194d5cc3527e97a9fea29582c6e178122e754bb3ffaf6a2ce241fc959f7d3"} Dec 03 08:42:33 crc kubenswrapper[4576]: I1203 08:42:33.985654 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-lcjhm" event={"ID":"7b548daf-86aa-4fb1-86a8-3064794876c1","Type":"ContainerStarted","Data":"e71731c516fbc85ba5c418cbcac59af19b9799b5f731f4740ba3ce6af5bac638"} Dec 03 08:42:33 crc kubenswrapper[4576]: I1203 08:42:33.986273 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-lcjhm" Dec 03 08:42:33 crc kubenswrapper[4576]: I1203 08:42:33.989358 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-8rpwp" event={"ID":"154a9dc5-f312-436b-958f-c78ac4ec740c","Type":"ContainerStarted","Data":"9e77a2904812c0aea13ab605af2d90f5e231645c04675c13bacdde6dade9eda6"} Dec 03 08:42:33 crc kubenswrapper[4576]: I1203 08:42:33.989401 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-multus/multus-admission-controller-857f4d67dd-8rpwp" event={"ID":"154a9dc5-f312-436b-958f-c78ac4ec740c","Type":"ContainerStarted","Data":"aa5537f22ef2c87ede6a47e1d0d0ac8a8d9b3e95f6c990b077856ea173403bab"} Dec 03 08:42:34 crc kubenswrapper[4576]: I1203 08:42:34.001445 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412510-jwjq6" event={"ID":"70305d8f-8bdf-437b-ba4d-aaadc408205d","Type":"ContainerStarted","Data":"7427416535c820c63b653125794914811da6781790bd7deb7d82db900b897430"} Dec 03 08:42:34 crc kubenswrapper[4576]: I1203 08:42:34.020304 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-sjsrz" event={"ID":"f85719c9-01af-498b-ba00-7a43b7b923e3","Type":"ContainerStarted","Data":"582cbf3b17d17c0ef8a059e8102a52e8722063b93c95efbe3273b72d16055ec4"} Dec 03 08:42:34 crc kubenswrapper[4576]: I1203 08:42:34.028374 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:34 crc kubenswrapper[4576]: E1203 08:42:34.028673 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:34.528650993 +0000 UTC m=+161.914627977 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:34 crc kubenswrapper[4576]: I1203 08:42:34.049760 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-nsf2s" event={"ID":"bedc7336-f16c-485b-8cc8-13eea705d68a","Type":"ContainerStarted","Data":"bf35916106c5b738331c947783e37dcb0040208eb73582f5a9ef65f89510a20a"} Dec 03 08:42:34 crc kubenswrapper[4576]: I1203 08:42:34.049832 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-nsf2s" event={"ID":"bedc7336-f16c-485b-8cc8-13eea705d68a","Type":"ContainerStarted","Data":"d3987f7424b64bf422c3f0634dfaced49dd3d33323cdb9ec53a508aab7ad4b37"} Dec 03 08:42:34 crc kubenswrapper[4576]: I1203 08:42:34.066816 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vbnmn" Dec 03 08:42:34 crc kubenswrapper[4576]: I1203 08:42:34.073292 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-v4pcv" event={"ID":"b09df583-33ba-444d-aa5e-62158b7a3f80","Type":"ContainerStarted","Data":"f2d7b250b92c41499c1f3647e08856a39d50411b9512910d61138506fbca04f1"} Dec 03 08:42:34 crc kubenswrapper[4576]: I1203 08:42:34.073351 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-controller-84d6567774-v4pcv" event={"ID":"b09df583-33ba-444d-aa5e-62158b7a3f80","Type":"ContainerStarted","Data":"8c678e9a857b4785140783157360a420f5f30bb1e33b07fb838d467feffe8833"} Dec 03 08:42:34 crc kubenswrapper[4576]: I1203 08:42:34.089068 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-lcjhm" Dec 03 08:42:34 crc kubenswrapper[4576]: I1203 08:42:34.103195 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cwfc8" event={"ID":"64658394-9b30-424c-b243-e997781796e1","Type":"ContainerStarted","Data":"e7660c1b0b519375ff1e1748182f0a37da8ac26002f15cfae33ddfc7680d4f98"} Dec 03 08:42:34 crc kubenswrapper[4576]: I1203 08:42:34.132250 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:34 crc kubenswrapper[4576]: E1203 08:42:34.133708 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:34.633691364 +0000 UTC m=+162.019668348 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:34 crc kubenswrapper[4576]: I1203 08:42:34.138372 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-t7p55" event={"ID":"fe3569cb-a99a-4e1a-8b5f-124e68276330","Type":"ContainerStarted","Data":"864765c57ac5bb579bbf6c6a7f81251614519316fb5228113ddc0ca0996677de"} Dec 03 08:42:34 crc kubenswrapper[4576]: I1203 08:42:34.148460 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lskhl" event={"ID":"e82ee5bd-9dd0-4d09-a629-21a1d08dbb06","Type":"ContainerStarted","Data":"a521e040ff5b23440ed0c4e30d5f7993f7b2027f8f8f426c36751c4199341345"} Dec 03 08:42:34 crc kubenswrapper[4576]: I1203 08:42:34.168923 4576 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-mrptv container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.27:8080/healthz\": dial tcp 10.217.0.27:8080: connect: connection refused" start-of-body= Dec 03 08:42:34 crc kubenswrapper[4576]: I1203 08:42:34.168978 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-mrptv" podUID="2541618c-f550-48ef-9316-77a5dd7f1084" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.27:8080/healthz\": dial tcp 10.217.0.27:8080: connect: connection refused" Dec 03 08:42:34 crc kubenswrapper[4576]: I1203 
08:42:34.208363 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-qv4qp" Dec 03 08:42:34 crc kubenswrapper[4576]: I1203 08:42:34.233024 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:34 crc kubenswrapper[4576]: E1203 08:42:34.234214 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:34.734195034 +0000 UTC m=+162.120172008 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:34 crc kubenswrapper[4576]: I1203 08:42:34.335259 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:34 crc kubenswrapper[4576]: E1203 08:42:34.335761 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:34.835748121 +0000 UTC m=+162.221725105 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:34 crc kubenswrapper[4576]: I1203 08:42:34.365295 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29412510-jwjq6" podStartSLOduration=139.365260885 podStartE2EDuration="2m19.365260885s" podCreationTimestamp="2025-12-03 08:40:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:34.361718949 +0000 UTC m=+161.747695933" watchObservedRunningTime="2025-12-03 08:42:34.365260885 +0000 UTC m=+161.751237869" Dec 03 08:42:34 crc kubenswrapper[4576]: I1203 08:42:34.407143 4576 patch_prober.go:28] interesting pod/router-default-5444994796-glrgq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 08:42:34 crc kubenswrapper[4576]: [-]has-synced failed: reason withheld Dec 03 08:42:34 crc kubenswrapper[4576]: [+]process-running ok Dec 03 08:42:34 crc kubenswrapper[4576]: healthz check failed Dec 03 08:42:34 crc kubenswrapper[4576]: I1203 08:42:34.407222 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-glrgq" podUID="74ee01ca-ad15-4a8f-8c72-0daf093db217" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 08:42:34 crc kubenswrapper[4576]: I1203 08:42:34.438875 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:34 crc kubenswrapper[4576]: E1203 08:42:34.439276 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:34.939255201 +0000 UTC m=+162.325232185 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:34 crc kubenswrapper[4576]: I1203 08:42:34.495668 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-lcjhm" podStartSLOduration=138.495646097 podStartE2EDuration="2m18.495646097s" podCreationTimestamp="2025-12-03 08:40:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:34.494895396 +0000 UTC m=+161.880872380" watchObservedRunningTime="2025-12-03 08:42:34.495646097 +0000 UTC m=+161.881623081" Dec 03 08:42:34 crc kubenswrapper[4576]: I1203 08:42:34.496114 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9825w" podStartSLOduration=138.496108369 podStartE2EDuration="2m18.496108369s" podCreationTimestamp="2025-12-03 08:40:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:34.453840644 +0000 UTC m=+161.839817628" watchObservedRunningTime="2025-12-03 08:42:34.496108369 +0000 UTC m=+161.882085353" Dec 03 08:42:34 crc kubenswrapper[4576]: I1203 08:42:34.540187 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:34 crc kubenswrapper[4576]: E1203 08:42:34.540498 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:35.040485581 +0000 UTC m=+162.426462565 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:34 crc kubenswrapper[4576]: I1203 08:42:34.545821 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xbbn2" podStartSLOduration=138.545802573 podStartE2EDuration="2m18.545802573s" podCreationTimestamp="2025-12-03 08:40:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:34.542953827 +0000 UTC m=+161.928930831" watchObservedRunningTime="2025-12-03 08:42:34.545802573 +0000 UTC m=+161.931779557" Dec 03 08:42:34 crc kubenswrapper[4576]: I1203 08:42:34.582194 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-p4j4g" podStartSLOduration=138.58217518 podStartE2EDuration="2m18.58217518s" podCreationTimestamp="2025-12-03 08:40:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:34.579455318 +0000 UTC m=+161.965432302" watchObservedRunningTime="2025-12-03 08:42:34.58217518 +0000 UTC m=+161.968152164" Dec 03 08:42:34 crc kubenswrapper[4576]: I1203 08:42:34.652374 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:34 crc kubenswrapper[4576]: E1203 08:42:34.652502 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:35.152481899 +0000 UTC m=+162.538458883 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:34 crc kubenswrapper[4576]: I1203 08:42:34.652746 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:34 crc kubenswrapper[4576]: E1203 08:42:34.653119 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:35.153103216 +0000 UTC m=+162.539080200 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:34 crc kubenswrapper[4576]: I1203 08:42:34.714271 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2fhp6" podStartSLOduration=138.714253588 podStartE2EDuration="2m18.714253588s" podCreationTimestamp="2025-12-03 08:40:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:34.627643372 +0000 UTC m=+162.013620346" watchObservedRunningTime="2025-12-03 08:42:34.714253588 +0000 UTC m=+162.100230572" Dec 03 08:42:34 crc kubenswrapper[4576]: I1203 08:42:34.716036 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-nsf2s" podStartSLOduration=138.716031776 podStartE2EDuration="2m18.716031776s" podCreationTimestamp="2025-12-03 08:40:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:34.712241864 +0000 UTC m=+162.098218848" watchObservedRunningTime="2025-12-03 08:42:34.716031776 +0000 UTC m=+162.102008760" Dec 03 08:42:34 crc kubenswrapper[4576]: I1203 08:42:34.754284 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:34 crc kubenswrapper[4576]: E1203 08:42:34.754485 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:35.254441727 +0000 UTC m=+162.640418711 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:34 crc kubenswrapper[4576]: I1203 08:42:34.754670 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:34 crc kubenswrapper[4576]: E1203 08:42:34.754984 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:35.254971602 +0000 UTC m=+162.640948586 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:34 crc kubenswrapper[4576]: I1203 08:42:34.836809 4576 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-9825w container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.25:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 03 08:42:34 crc kubenswrapper[4576]: I1203 08:42:34.836901 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9825w" podUID="b3bdb0c8-1a8b-49fe-af23-0704abbeb3c2" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.25:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 03 08:42:34 crc kubenswrapper[4576]: I1203 08:42:34.856035 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:34 crc kubenswrapper[4576]: E1203 08:42:34.856477 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:35.356457347 +0000 UTC m=+162.742434331 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:34 crc kubenswrapper[4576]: I1203 08:42:34.875319 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-t7p55" podStartSLOduration=139.875305474 podStartE2EDuration="2m19.875305474s" podCreationTimestamp="2025-12-03 08:40:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:34.873406813 +0000 UTC m=+162.259383797" watchObservedRunningTime="2025-12-03 08:42:34.875305474 +0000 UTC m=+162.261282448" Dec 03 08:42:34 crc kubenswrapper[4576]: I1203 08:42:34.875868 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-88mqt" podStartSLOduration=138.875861649 podStartE2EDuration="2m18.875861649s" podCreationTimestamp="2025-12-03 08:40:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:34.834782576 +0000 UTC m=+162.220759550" watchObservedRunningTime="2025-12-03 08:42:34.875861649 +0000 UTC m=+162.261838633" Dec 03 08:42:34 crc kubenswrapper[4576]: I1203 08:42:34.897428 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-sjsrz" podStartSLOduration=138.897409428 podStartE2EDuration="2m18.897409428s" podCreationTimestamp="2025-12-03 08:40:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:34.895928148 +0000 UTC m=+162.281905132" watchObservedRunningTime="2025-12-03 08:42:34.897409428 +0000 UTC m=+162.283386402" Dec 03 08:42:34 crc kubenswrapper[4576]: I1203 08:42:34.952571 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-brdzq" podStartSLOduration=10.952549099 podStartE2EDuration="10.952549099s" podCreationTimestamp="2025-12-03 08:42:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:34.952118328 +0000 UTC m=+162.338095312" watchObservedRunningTime="2025-12-03 08:42:34.952549099 +0000 UTC m=+162.338526083" Dec 03 08:42:34 crc kubenswrapper[4576]: I1203 08:42:34.953582 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-8rpwp" podStartSLOduration=138.953576467 podStartE2EDuration="2m18.953576467s" podCreationTimestamp="2025-12-03 08:40:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:34.917941109 +0000 UTC m=+162.303918083" watchObservedRunningTime="2025-12-03 08:42:34.953576467 +0000 UTC m=+162.339553451" Dec 03 08:42:34 crc kubenswrapper[4576]: I1203 08:42:34.958925 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:34 crc kubenswrapper[4576]: E1203 08:42:34.959350 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:35.459336461 +0000 UTC m=+162.845313445 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:34 crc kubenswrapper[4576]: I1203 08:42:34.973770 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vbnmn" podStartSLOduration=138.973753668 podStartE2EDuration="2m18.973753668s" podCreationTimestamp="2025-12-03 08:40:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:34.972427932 +0000 UTC m=+162.358404916" watchObservedRunningTime="2025-12-03 08:42:34.973753668 +0000 UTC m=+162.359730652" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.005744 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-28lz2" podStartSLOduration=11.005724687 podStartE2EDuration="11.005724687s" podCreationTimestamp="2025-12-03 08:42:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:35.004381421 +0000 UTC m=+162.390358405" watchObservedRunningTime="2025-12-03 08:42:35.005724687 +0000 UTC m=+162.391701671" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.038200 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-v4pcv" podStartSLOduration=139.038179439 podStartE2EDuration="2m19.038179439s" podCreationTimestamp="2025-12-03 08:40:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:35.036102413 +0000 UTC m=+162.422079397" watchObservedRunningTime="2025-12-03 08:42:35.038179439 +0000 UTC m=+162.424156423" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.059857 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:35 crc kubenswrapper[4576]: E1203 08:42:35.060188 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:35.5601701 +0000 UTC m=+162.946147084 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.161288 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:35 crc kubenswrapper[4576]: E1203 08:42:35.162692 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:35.662670943 +0000 UTC m=+163.048647927 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.176195 4576 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-mrptv container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.27:8080/healthz\": dial tcp 10.217.0.27:8080: connect: connection refused" start-of-body= Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.176253 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-mrptv" podUID="2541618c-f550-48ef-9316-77a5dd7f1084" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.27:8080/healthz\": dial tcp 10.217.0.27:8080: connect: connection refused" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.176349 4576 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-x5pdd container/openshift-config-operator namespace/openshift-config-operator: Readiness probe status=failure output="Get \"https://10.217.0.11:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.176377 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-x5pdd" podUID="098beae0-8e26-45b7-bf18-cf6e4c83d7c7" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.11:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 03 08:42:35 crc 
kubenswrapper[4576]: I1203 08:42:35.265346 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:35 crc kubenswrapper[4576]: E1203 08:42:35.267150 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:35.767126099 +0000 UTC m=+163.153103083 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.367439 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:35 crc kubenswrapper[4576]: E1203 08:42:35.367979 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:35.867960987 +0000 UTC m=+163.253937981 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.404273 4576 patch_prober.go:28] interesting pod/router-default-5444994796-glrgq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 08:42:35 crc kubenswrapper[4576]: [-]has-synced failed: reason withheld Dec 03 08:42:35 crc kubenswrapper[4576]: [+]process-running ok Dec 03 08:42:35 crc kubenswrapper[4576]: healthz check failed Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.404368 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-glrgq" podUID="74ee01ca-ad15-4a8f-8c72-0daf093db217" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.410800 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.411910 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.414585 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.414808 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.441173 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.469389 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:35 crc kubenswrapper[4576]: E1203 08:42:35.469799 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:35.969774151 +0000 UTC m=+163.355751135 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.574726 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1096b16a-a819-479d-9739-12747402df0c-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1096b16a-a819-479d-9739-12747402df0c\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.575170 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1096b16a-a819-479d-9739-12747402df0c-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1096b16a-a819-479d-9739-12747402df0c\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.575467 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:35 crc kubenswrapper[4576]: E1203 08:42:35.576653 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:36.076595711 +0000 UTC m=+163.462572725 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.602664 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-gmpkg"] Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.603780 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gmpkg" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.609971 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.624030 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gmpkg"] Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.680758 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:35 crc kubenswrapper[4576]: E1203 08:42:35.681291 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:36.181252481 +0000 UTC m=+163.567229475 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.681672 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1096b16a-a819-479d-9739-12747402df0c-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1096b16a-a819-479d-9739-12747402df0c\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.681940 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.681857 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1096b16a-a819-479d-9739-12747402df0c-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1096b16a-a819-479d-9739-12747402df0c\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.682103 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b040fd94-da16-41e4-8fdf-66b7a8e05d87-catalog-content\") pod \"certified-operators-gmpkg\" (UID: \"b040fd94-da16-41e4-8fdf-66b7a8e05d87\") " pod="openshift-marketplace/certified-operators-gmpkg" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.682305 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5v5th\" (UniqueName: 
\"kubernetes.io/projected/b040fd94-da16-41e4-8fdf-66b7a8e05d87-kube-api-access-5v5th\") pod \"certified-operators-gmpkg\" (UID: \"b040fd94-da16-41e4-8fdf-66b7a8e05d87\") " pod="openshift-marketplace/certified-operators-gmpkg" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.682460 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b040fd94-da16-41e4-8fdf-66b7a8e05d87-utilities\") pod \"certified-operators-gmpkg\" (UID: \"b040fd94-da16-41e4-8fdf-66b7a8e05d87\") " pod="openshift-marketplace/certified-operators-gmpkg" Dec 03 08:42:35 crc kubenswrapper[4576]: E1203 08:42:35.682517 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:36.182497075 +0000 UTC m=+163.568474059 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.682721 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1096b16a-a819-479d-9739-12747402df0c-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1096b16a-a819-479d-9739-12747402df0c\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.687111 4576 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-9825w container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Dec 03 08:42:35 crc kubenswrapper[4576]: [+]log ok Dec 03 08:42:35 crc kubenswrapper[4576]: [-]poststarthook/generic-apiserver-start-informers failed: reason withheld Dec 03 08:42:35 crc kubenswrapper[4576]: [+]poststarthook/max-in-flight-filter ok Dec 03 08:42:35 crc kubenswrapper[4576]: [-]poststarthook/storage-object-count-tracker-hook failed: reason withheld Dec 03 08:42:35 crc kubenswrapper[4576]: healthz check failed Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.687188 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9825w" podUID="b3bdb0c8-1a8b-49fe-af23-0704abbeb3c2" containerName="packageserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.708854 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-t7p55" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.709138 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-t7p55" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.714336 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1096b16a-a819-479d-9739-12747402df0c-kube-api-access\") 
pod \"revision-pruner-9-crc\" (UID: \"1096b16a-a819-479d-9739-12747402df0c\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.738774 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.784393 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:35 crc kubenswrapper[4576]: E1203 08:42:35.784614 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:36.284584587 +0000 UTC m=+163.670561571 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.785094 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.785137 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b040fd94-da16-41e4-8fdf-66b7a8e05d87-catalog-content\") pod \"certified-operators-gmpkg\" (UID: \"b040fd94-da16-41e4-8fdf-66b7a8e05d87\") " pod="openshift-marketplace/certified-operators-gmpkg" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.785166 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5v5th\" (UniqueName: \"kubernetes.io/projected/b040fd94-da16-41e4-8fdf-66b7a8e05d87-kube-api-access-5v5th\") pod \"certified-operators-gmpkg\" (UID: \"b040fd94-da16-41e4-8fdf-66b7a8e05d87\") " pod="openshift-marketplace/certified-operators-gmpkg" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.785235 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b040fd94-da16-41e4-8fdf-66b7a8e05d87-utilities\") pod \"certified-operators-gmpkg\" (UID: \"b040fd94-da16-41e4-8fdf-66b7a8e05d87\") " pod="openshift-marketplace/certified-operators-gmpkg" Dec 03 08:42:35 crc kubenswrapper[4576]: E1203 08:42:35.785495 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-03 08:42:36.285472231 +0000 UTC m=+163.671449215 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.785906 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b040fd94-da16-41e4-8fdf-66b7a8e05d87-catalog-content\") pod \"certified-operators-gmpkg\" (UID: \"b040fd94-da16-41e4-8fdf-66b7a8e05d87\") " pod="openshift-marketplace/certified-operators-gmpkg" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.785959 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b040fd94-da16-41e4-8fdf-66b7a8e05d87-utilities\") pod \"certified-operators-gmpkg\" (UID: \"b040fd94-da16-41e4-8fdf-66b7a8e05d87\") " pod="openshift-marketplace/certified-operators-gmpkg" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.799807 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-rxb6h"] Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.801465 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rxb6h" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.807810 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.814146 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-74x2q" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.814191 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-74x2q" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.815198 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rxb6h"] Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.833124 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-74x2q" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.835875 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5v5th\" (UniqueName: \"kubernetes.io/projected/b040fd94-da16-41e4-8fdf-66b7a8e05d87-kube-api-access-5v5th\") pod \"certified-operators-gmpkg\" (UID: \"b040fd94-da16-41e4-8fdf-66b7a8e05d87\") " pod="openshift-marketplace/certified-operators-gmpkg" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.874648 4576 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-x5pdd container/openshift-config-operator namespace/openshift-config-operator: Liveness probe status=failure output="Get \"https://10.217.0.11:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.874721 4576 prober.go:107] 
"Probe failed" probeType="Liveness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-x5pdd" podUID="098beae0-8e26-45b7-bf18-cf6e4c83d7c7" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.11:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.886301 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.886723 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9s8vg\" (UniqueName: \"kubernetes.io/projected/711712fe-5398-42c5-bff6-d8f984c47764-kube-api-access-9s8vg\") pod \"community-operators-rxb6h\" (UID: \"711712fe-5398-42c5-bff6-d8f984c47764\") " pod="openshift-marketplace/community-operators-rxb6h" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.886805 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/711712fe-5398-42c5-bff6-d8f984c47764-catalog-content\") pod \"community-operators-rxb6h\" (UID: \"711712fe-5398-42c5-bff6-d8f984c47764\") " pod="openshift-marketplace/community-operators-rxb6h" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.886882 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/711712fe-5398-42c5-bff6-d8f984c47764-utilities\") pod \"community-operators-rxb6h\" (UID: \"711712fe-5398-42c5-bff6-d8f984c47764\") " pod="openshift-marketplace/community-operators-rxb6h" Dec 03 08:42:35 crc kubenswrapper[4576]: E1203 08:42:35.887050 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:36.387036788 +0000 UTC m=+163.773013762 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.917087 4576 patch_prober.go:28] interesting pod/downloads-7954f5f757-l27nx container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.917155 4576 patch_prober.go:28] interesting pod/downloads-7954f5f757-l27nx container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.917219 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-l27nx" podUID="f9d3808c-11ef-421d-83e6-b909679c5490" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.917148 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-l27nx" podUID="f9d3808c-11ef-421d-83e6-b909679c5490" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.921631 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gmpkg" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.953871 4576 patch_prober.go:28] interesting pod/apiserver-76f77b778f-t7p55 container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Dec 03 08:42:35 crc kubenswrapper[4576]: [+]log ok Dec 03 08:42:35 crc kubenswrapper[4576]: [+]etcd ok Dec 03 08:42:35 crc kubenswrapper[4576]: [+]poststarthook/start-apiserver-admission-initializer ok Dec 03 08:42:35 crc kubenswrapper[4576]: [+]poststarthook/generic-apiserver-start-informers ok Dec 03 08:42:35 crc kubenswrapper[4576]: [+]poststarthook/max-in-flight-filter ok Dec 03 08:42:35 crc kubenswrapper[4576]: [+]poststarthook/storage-object-count-tracker-hook ok Dec 03 08:42:35 crc kubenswrapper[4576]: [+]poststarthook/image.openshift.io-apiserver-caches ok Dec 03 08:42:35 crc kubenswrapper[4576]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Dec 03 08:42:35 crc kubenswrapper[4576]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Dec 03 08:42:35 crc kubenswrapper[4576]: [+]poststarthook/project.openshift.io-projectcache ok Dec 03 08:42:35 crc kubenswrapper[4576]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Dec 03 08:42:35 crc kubenswrapper[4576]: [+]poststarthook/openshift.io-startinformers ok Dec 03 08:42:35 crc kubenswrapper[4576]: [+]poststarthook/openshift.io-restmapperupdater ok Dec 03 08:42:35 crc kubenswrapper[4576]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Dec 03 08:42:35 crc kubenswrapper[4576]: livez check failed Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.954650 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-t7p55" podUID="fe3569cb-a99a-4e1a-8b5f-124e68276330" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.997274 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.997400 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9s8vg\" (UniqueName: \"kubernetes.io/projected/711712fe-5398-42c5-bff6-d8f984c47764-kube-api-access-9s8vg\") pod \"community-operators-rxb6h\" (UID: \"711712fe-5398-42c5-bff6-d8f984c47764\") " pod="openshift-marketplace/community-operators-rxb6h" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.997429 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/711712fe-5398-42c5-bff6-d8f984c47764-catalog-content\") pod \"community-operators-rxb6h\" (UID: \"711712fe-5398-42c5-bff6-d8f984c47764\") " pod="openshift-marketplace/community-operators-rxb6h" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.997491 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/711712fe-5398-42c5-bff6-d8f984c47764-utilities\") pod \"community-operators-rxb6h\" (UID: \"711712fe-5398-42c5-bff6-d8f984c47764\") " pod="openshift-marketplace/community-operators-rxb6h" Dec 03 08:42:35 crc kubenswrapper[4576]: E1203 08:42:35.998710 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:36.498691178 +0000 UTC m=+163.884668252 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.998862 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/711712fe-5398-42c5-bff6-d8f984c47764-catalog-content\") pod \"community-operators-rxb6h\" (UID: \"711712fe-5398-42c5-bff6-d8f984c47764\") " pod="openshift-marketplace/community-operators-rxb6h" Dec 03 08:42:35 crc kubenswrapper[4576]: I1203 08:42:35.999099 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/711712fe-5398-42c5-bff6-d8f984c47764-utilities\") pod \"community-operators-rxb6h\" (UID: \"711712fe-5398-42c5-bff6-d8f984c47764\") " pod="openshift-marketplace/community-operators-rxb6h" Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.032566 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hmmm5"] Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.033986 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hmmm5" Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.049956 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hmmm5"] Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.062300 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9s8vg\" (UniqueName: \"kubernetes.io/projected/711712fe-5398-42c5-bff6-d8f984c47764-kube-api-access-9s8vg\") pod \"community-operators-rxb6h\" (UID: \"711712fe-5398-42c5-bff6-d8f984c47764\") " pod="openshift-marketplace/community-operators-rxb6h" Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.098253 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:36 crc kubenswrapper[4576]: E1203 08:42:36.098551 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:36.598519669 +0000 UTC m=+163.984496653 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.098935 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:36 crc kubenswrapper[4576]: E1203 08:42:36.099381 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:36.599351261 +0000 UTC m=+163.985328235 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.117193 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rxb6h" Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.190227 4576 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-x5pdd container/openshift-config-operator namespace/openshift-config-operator: Readiness probe status=failure output="Get \"https://10.217.0.11:8443/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.191388 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-x5pdd" podUID="098beae0-8e26-45b7-bf18-cf6e4c83d7c7" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.11:8443/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.201183 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.201429 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7njkb\" (UniqueName: \"kubernetes.io/projected/558ce81e-ebdc-4b7d-bd04-a3b485857825-kube-api-access-7njkb\") pod \"certified-operators-hmmm5\" (UID: \"558ce81e-ebdc-4b7d-bd04-a3b485857825\") " pod="openshift-marketplace/certified-operators-hmmm5" Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.201490 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/558ce81e-ebdc-4b7d-bd04-a3b485857825-utilities\") pod \"certified-operators-hmmm5\" (UID: \"558ce81e-ebdc-4b7d-bd04-a3b485857825\") " pod="openshift-marketplace/certified-operators-hmmm5" Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.201549 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/558ce81e-ebdc-4b7d-bd04-a3b485857825-catalog-content\") pod \"certified-operators-hmmm5\" (UID: \"558ce81e-ebdc-4b7d-bd04-a3b485857825\") " pod="openshift-marketplace/certified-operators-hmmm5" Dec 03 08:42:36 crc kubenswrapper[4576]: E1203 08:42:36.201681 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:36.70166575 +0000 UTC m=+164.087642744 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.215586 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-9xtqh" event={"ID":"825049cb-3413-4d9b-81bb-7210dc51a05f","Type":"ContainerStarted","Data":"9ef1ad562a5caf9f606749da2cf6347e93d7dcea4b9607f8c9a63f3777f99adb"} Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.223769 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-kqqpt"] Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.241749 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kqqpt" Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.242741 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-74x2q" Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.272053 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kqqpt"] Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.284130 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-x5pdd" Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.304425 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.304548 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7njkb\" (UniqueName: \"kubernetes.io/projected/558ce81e-ebdc-4b7d-bd04-a3b485857825-kube-api-access-7njkb\") pod \"certified-operators-hmmm5\" (UID: \"558ce81e-ebdc-4b7d-bd04-a3b485857825\") " pod="openshift-marketplace/certified-operators-hmmm5" Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.304698 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/558ce81e-ebdc-4b7d-bd04-a3b485857825-utilities\") pod \"certified-operators-hmmm5\" (UID: \"558ce81e-ebdc-4b7d-bd04-a3b485857825\") " pod="openshift-marketplace/certified-operators-hmmm5" Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.304758 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/558ce81e-ebdc-4b7d-bd04-a3b485857825-catalog-content\") pod \"certified-operators-hmmm5\" (UID: \"558ce81e-ebdc-4b7d-bd04-a3b485857825\") " pod="openshift-marketplace/certified-operators-hmmm5" Dec 03 08:42:36 crc kubenswrapper[4576]: E1203 08:42:36.307498 4576 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:36.807482382 +0000 UTC m=+164.193459366 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.307701 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/558ce81e-ebdc-4b7d-bd04-a3b485857825-utilities\") pod \"certified-operators-hmmm5\" (UID: \"558ce81e-ebdc-4b7d-bd04-a3b485857825\") " pod="openshift-marketplace/certified-operators-hmmm5" Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.308864 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/558ce81e-ebdc-4b7d-bd04-a3b485857825-catalog-content\") pod \"certified-operators-hmmm5\" (UID: \"558ce81e-ebdc-4b7d-bd04-a3b485857825\") " pod="openshift-marketplace/certified-operators-hmmm5" Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.454332 4576 patch_prober.go:28] interesting pod/router-default-5444994796-glrgq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 08:42:36 crc kubenswrapper[4576]: [-]has-synced failed: reason withheld Dec 03 08:42:36 crc kubenswrapper[4576]: [+]process-running ok Dec 03 08:42:36 crc kubenswrapper[4576]: healthz check failed Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.454402 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-glrgq" podUID="74ee01ca-ad15-4a8f-8c72-0daf093db217" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.459238 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.459416 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/430635c4-315a-4af6-83d9-3e4c7407266d-catalog-content\") pod \"community-operators-kqqpt\" (UID: \"430635c4-315a-4af6-83d9-3e4c7407266d\") " pod="openshift-marketplace/community-operators-kqqpt" Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.459549 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/430635c4-315a-4af6-83d9-3e4c7407266d-utilities\") pod \"community-operators-kqqpt\" (UID: \"430635c4-315a-4af6-83d9-3e4c7407266d\") " pod="openshift-marketplace/community-operators-kqqpt" Dec 03 08:42:36 crc 
kubenswrapper[4576]: I1203 08:42:36.459588 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j2jjs\" (UniqueName: \"kubernetes.io/projected/430635c4-315a-4af6-83d9-3e4c7407266d-kube-api-access-j2jjs\") pod \"community-operators-kqqpt\" (UID: \"430635c4-315a-4af6-83d9-3e4c7407266d\") " pod="openshift-marketplace/community-operators-kqqpt" Dec 03 08:42:36 crc kubenswrapper[4576]: E1203 08:42:36.460296 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:36.960277586 +0000 UTC m=+164.346254570 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.460838 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-h7ncw" Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.469653 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-h7ncw" Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.469790 4576 patch_prober.go:28] interesting pod/console-f9d7485db-h7ncw container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.20:8443/health\": dial tcp 10.217.0.20:8443: connect: connection refused" start-of-body= Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.469827 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-h7ncw" podUID="3cb91673-2622-41a5-91d3-a79e0ba7289b" containerName="console" probeResult="failure" output="Get \"https://10.217.0.20:8443/health\": dial tcp 10.217.0.20:8443: connect: connection refused" Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.485815 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7njkb\" (UniqueName: \"kubernetes.io/projected/558ce81e-ebdc-4b7d-bd04-a3b485857825-kube-api-access-7njkb\") pod \"certified-operators-hmmm5\" (UID: \"558ce81e-ebdc-4b7d-bd04-a3b485857825\") " pod="openshift-marketplace/certified-operators-hmmm5" Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.628108 4576 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.656466 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.656598 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/430635c4-315a-4af6-83d9-3e4c7407266d-catalog-content\") pod \"community-operators-kqqpt\" (UID: \"430635c4-315a-4af6-83d9-3e4c7407266d\") " pod="openshift-marketplace/community-operators-kqqpt" Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.656679 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/430635c4-315a-4af6-83d9-3e4c7407266d-utilities\") pod \"community-operators-kqqpt\" (UID: \"430635c4-315a-4af6-83d9-3e4c7407266d\") " pod="openshift-marketplace/community-operators-kqqpt" Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.656709 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j2jjs\" (UniqueName: \"kubernetes.io/projected/430635c4-315a-4af6-83d9-3e4c7407266d-kube-api-access-j2jjs\") pod \"community-operators-kqqpt\" (UID: \"430635c4-315a-4af6-83d9-3e4c7407266d\") " pod="openshift-marketplace/community-operators-kqqpt" Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.657301 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/430635c4-315a-4af6-83d9-3e4c7407266d-catalog-content\") pod \"community-operators-kqqpt\" (UID: \"430635c4-315a-4af6-83d9-3e4c7407266d\") " pod="openshift-marketplace/community-operators-kqqpt" Dec 03 08:42:36 crc kubenswrapper[4576]: E1203 08:42:36.657969 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:37.157946375 +0000 UTC m=+164.543923369 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.658370 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/430635c4-315a-4af6-83d9-3e4c7407266d-utilities\") pod \"community-operators-kqqpt\" (UID: \"430635c4-315a-4af6-83d9-3e4c7407266d\") " pod="openshift-marketplace/community-operators-kqqpt" Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.658661 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hmmm5" Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.704627 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j2jjs\" (UniqueName: \"kubernetes.io/projected/430635c4-315a-4af6-83d9-3e4c7407266d-kube-api-access-j2jjs\") pod \"community-operators-kqqpt\" (UID: \"430635c4-315a-4af6-83d9-3e4c7407266d\") " pod="openshift-marketplace/community-operators-kqqpt" Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.782114 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:36 crc kubenswrapper[4576]: E1203 08:42:36.782516 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:37.282499111 +0000 UTC m=+164.668476085 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.782606 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:36 crc kubenswrapper[4576]: E1203 08:42:36.783787 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:37.283779185 +0000 UTC m=+164.669756169 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.884041 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:36 crc kubenswrapper[4576]: E1203 08:42:36.884445 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:37.384428529 +0000 UTC m=+164.770405513 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:36 crc kubenswrapper[4576]: I1203 08:42:36.927189 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kqqpt" Dec 03 08:42:37 crc kubenswrapper[4576]: I1203 08:42:37.075789 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:37 crc kubenswrapper[4576]: E1203 08:42:37.076190 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:37.576176269 +0000 UTC m=+164.962153253 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:37 crc kubenswrapper[4576]: I1203 08:42:37.178321 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:37 crc kubenswrapper[4576]: E1203 08:42:37.178664 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 08:42:37.678644111 +0000 UTC m=+165.064621095 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:37 crc kubenswrapper[4576]: I1203 08:42:37.201671 4576 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-03T08:42:36.628155855Z","Handler":null,"Name":""} Dec 03 08:42:37 crc kubenswrapper[4576]: I1203 08:42:37.266925 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rxb6h"] Dec 03 08:42:37 crc kubenswrapper[4576]: W1203 08:42:37.270952 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod711712fe_5398_42c5_bff6_d8f984c47764.slice/crio-6273fcb9889efac38a16b6f16d51c7f365a440316c1894ad17942f3595d4fd6b WatchSource:0}: Error finding container 6273fcb9889efac38a16b6f16d51c7f365a440316c1894ad17942f3595d4fd6b: Status 404 returned error can't find the container with id 6273fcb9889efac38a16b6f16d51c7f365a440316c1894ad17942f3595d4fd6b Dec 03 08:42:37 crc kubenswrapper[4576]: I1203 08:42:37.281870 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:37 crc kubenswrapper[4576]: E1203 08:42:37.282261 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 08:42:37.782245194 +0000 UTC m=+165.168222178 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hg98m" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 08:42:37 crc kubenswrapper[4576]: I1203 08:42:37.316884 4576 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Dec 03 08:42:37 crc kubenswrapper[4576]: I1203 08:42:37.316947 4576 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Dec 03 08:42:37 crc kubenswrapper[4576]: I1203 08:42:37.346258 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-9xtqh" event={"ID":"825049cb-3413-4d9b-81bb-7210dc51a05f","Type":"ContainerStarted","Data":"f91c5a50d09cd2c568051001dab409cddb963881f4948431660edc9dc26cea8c"} Dec 03 08:42:37 crc kubenswrapper[4576]: I1203 08:42:37.365765 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 03 08:42:37 crc kubenswrapper[4576]: I1203 08:42:37.386121 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 08:42:37 crc kubenswrapper[4576]: I1203 08:42:37.402877 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-glrgq" Dec 03 08:42:37 crc kubenswrapper[4576]: I1203 08:42:37.427834 4576 patch_prober.go:28] interesting pod/router-default-5444994796-glrgq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 08:42:37 crc kubenswrapper[4576]: [-]has-synced failed: reason withheld Dec 03 08:42:37 crc kubenswrapper[4576]: [+]process-running ok Dec 03 08:42:37 crc kubenswrapper[4576]: healthz check failed Dec 03 08:42:37 crc kubenswrapper[4576]: I1203 08:42:37.427875 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-glrgq" podUID="74ee01ca-ad15-4a8f-8c72-0daf093db217" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 08:42:37 crc kubenswrapper[4576]: I1203 08:42:37.483047 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 03 08:42:37 crc kubenswrapper[4576]: I1203 08:42:37.491745 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:37 crc kubenswrapper[4576]: I1203 08:42:37.504444 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-mrptv" Dec 03 08:42:37 crc kubenswrapper[4576]: I1203 08:42:37.541650 4576 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 03 08:42:37 crc kubenswrapper[4576]: I1203 08:42:37.541692 4576 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:37 crc kubenswrapper[4576]: I1203 08:42:37.565052 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gmpkg"] Dec 03 08:42:37 crc kubenswrapper[4576]: I1203 08:42:37.654471 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hmmm5"] Dec 03 08:42:37 crc kubenswrapper[4576]: I1203 08:42:37.700377 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 03 08:42:37 crc kubenswrapper[4576]: I1203 08:42:37.706094 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kqqpt"] Dec 03 08:42:37 crc kubenswrapper[4576]: I1203 08:42:37.713951 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9825w" Dec 03 08:42:37 crc kubenswrapper[4576]: I1203 08:42:37.830502 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-lcs7f"] Dec 03 08:42:37 crc kubenswrapper[4576]: I1203 08:42:37.868848 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lcs7f" Dec 03 08:42:37 crc kubenswrapper[4576]: I1203 08:42:37.877143 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 03 08:42:37 crc kubenswrapper[4576]: I1203 08:42:37.909595 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lcs7f"] Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.012410 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b420867b-5c27-4b98-a2ab-0dd31175f5c0-catalog-content\") pod \"redhat-marketplace-lcs7f\" (UID: \"b420867b-5c27-4b98-a2ab-0dd31175f5c0\") " pod="openshift-marketplace/redhat-marketplace-lcs7f" Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.012711 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwj2f\" (UniqueName: \"kubernetes.io/projected/b420867b-5c27-4b98-a2ab-0dd31175f5c0-kube-api-access-rwj2f\") pod \"redhat-marketplace-lcs7f\" (UID: \"b420867b-5c27-4b98-a2ab-0dd31175f5c0\") " pod="openshift-marketplace/redhat-marketplace-lcs7f" Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.012851 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b420867b-5c27-4b98-a2ab-0dd31175f5c0-utilities\") pod \"redhat-marketplace-lcs7f\" (UID: \"b420867b-5c27-4b98-a2ab-0dd31175f5c0\") " pod="openshift-marketplace/redhat-marketplace-lcs7f" Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.076841 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hg98m\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.114066 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b420867b-5c27-4b98-a2ab-0dd31175f5c0-catalog-content\") pod \"redhat-marketplace-lcs7f\" (UID: \"b420867b-5c27-4b98-a2ab-0dd31175f5c0\") " pod="openshift-marketplace/redhat-marketplace-lcs7f" Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.114258 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rwj2f\" (UniqueName: \"kubernetes.io/projected/b420867b-5c27-4b98-a2ab-0dd31175f5c0-kube-api-access-rwj2f\") pod \"redhat-marketplace-lcs7f\" (UID: \"b420867b-5c27-4b98-a2ab-0dd31175f5c0\") " pod="openshift-marketplace/redhat-marketplace-lcs7f" Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.114692 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b420867b-5c27-4b98-a2ab-0dd31175f5c0-catalog-content\") pod \"redhat-marketplace-lcs7f\" (UID: \"b420867b-5c27-4b98-a2ab-0dd31175f5c0\") " pod="openshift-marketplace/redhat-marketplace-lcs7f" Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.114820 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/b420867b-5c27-4b98-a2ab-0dd31175f5c0-utilities\") pod \"redhat-marketplace-lcs7f\" (UID: \"b420867b-5c27-4b98-a2ab-0dd31175f5c0\") " pod="openshift-marketplace/redhat-marketplace-lcs7f" Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.115127 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b420867b-5c27-4b98-a2ab-0dd31175f5c0-utilities\") pod \"redhat-marketplace-lcs7f\" (UID: \"b420867b-5c27-4b98-a2ab-0dd31175f5c0\") " pod="openshift-marketplace/redhat-marketplace-lcs7f" Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.147374 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rwj2f\" (UniqueName: \"kubernetes.io/projected/b420867b-5c27-4b98-a2ab-0dd31175f5c0-kube-api-access-rwj2f\") pod \"redhat-marketplace-lcs7f\" (UID: \"b420867b-5c27-4b98-a2ab-0dd31175f5c0\") " pod="openshift-marketplace/redhat-marketplace-lcs7f" Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.198893 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-q5qf5"] Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.200588 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q5qf5" Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.228501 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-q5qf5"] Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.280680 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lcs7f" Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.317699 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/21e70344-efa0-4c1b-a490-a52a1ffcdd13-catalog-content\") pod \"redhat-marketplace-q5qf5\" (UID: \"21e70344-efa0-4c1b-a490-a52a1ffcdd13\") " pod="openshift-marketplace/redhat-marketplace-q5qf5" Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.318330 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/21e70344-efa0-4c1b-a490-a52a1ffcdd13-utilities\") pod \"redhat-marketplace-q5qf5\" (UID: \"21e70344-efa0-4c1b-a490-a52a1ffcdd13\") " pod="openshift-marketplace/redhat-marketplace-q5qf5" Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.318467 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4zqw\" (UniqueName: \"kubernetes.io/projected/21e70344-efa0-4c1b-a490-a52a1ffcdd13-kube-api-access-c4zqw\") pod \"redhat-marketplace-q5qf5\" (UID: \"21e70344-efa0-4c1b-a490-a52a1ffcdd13\") " pod="openshift-marketplace/redhat-marketplace-q5qf5" Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.353847 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.381934 4576 generic.go:334] "Generic (PLEG): container finished" podID="430635c4-315a-4af6-83d9-3e4c7407266d" containerID="b420d506ac1476af64f3d2f9932966f9f6d4854267101d67d6593bee8039fb5a" exitCode=0 Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.382260 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kqqpt" event={"ID":"430635c4-315a-4af6-83d9-3e4c7407266d","Type":"ContainerDied","Data":"b420d506ac1476af64f3d2f9932966f9f6d4854267101d67d6593bee8039fb5a"} Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.383165 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kqqpt" event={"ID":"430635c4-315a-4af6-83d9-3e4c7407266d","Type":"ContainerStarted","Data":"a292ba03fa232640895072b9744e16154b79b5b5e6bb66b7f8b7551dd708ca6b"} Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.387084 4576 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.390179 4576 generic.go:334] "Generic (PLEG): container finished" podID="711712fe-5398-42c5-bff6-d8f984c47764" containerID="8ac9ac909f87dce2f70dce3d8c6fd046aa4d76aed83980890cc4041697630274" exitCode=0 Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.390349 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rxb6h" event={"ID":"711712fe-5398-42c5-bff6-d8f984c47764","Type":"ContainerDied","Data":"8ac9ac909f87dce2f70dce3d8c6fd046aa4d76aed83980890cc4041697630274"} Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.390438 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rxb6h" event={"ID":"711712fe-5398-42c5-bff6-d8f984c47764","Type":"ContainerStarted","Data":"6273fcb9889efac38a16b6f16d51c7f365a440316c1894ad17942f3595d4fd6b"} Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.403892 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"1096b16a-a819-479d-9739-12747402df0c","Type":"ContainerStarted","Data":"a3fc8335bd160d1186d09ca2b8b9c3b7d38ec17755e8ceabe25c50d1022157b2"} Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.403940 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"1096b16a-a819-479d-9739-12747402df0c","Type":"ContainerStarted","Data":"8ef46eacd9491b35953458df2f41ac662c60efadb2c03dcb8eb3d9808a3c8b43"} Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.419893 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/21e70344-efa0-4c1b-a490-a52a1ffcdd13-utilities\") pod \"redhat-marketplace-q5qf5\" (UID: \"21e70344-efa0-4c1b-a490-a52a1ffcdd13\") " pod="openshift-marketplace/redhat-marketplace-q5qf5" Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.419963 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4zqw\" (UniqueName: \"kubernetes.io/projected/21e70344-efa0-4c1b-a490-a52a1ffcdd13-kube-api-access-c4zqw\") pod \"redhat-marketplace-q5qf5\" (UID: \"21e70344-efa0-4c1b-a490-a52a1ffcdd13\") " pod="openshift-marketplace/redhat-marketplace-q5qf5" Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 
08:42:38.420046 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/21e70344-efa0-4c1b-a490-a52a1ffcdd13-catalog-content\") pod \"redhat-marketplace-q5qf5\" (UID: \"21e70344-efa0-4c1b-a490-a52a1ffcdd13\") " pod="openshift-marketplace/redhat-marketplace-q5qf5" Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.421149 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/21e70344-efa0-4c1b-a490-a52a1ffcdd13-catalog-content\") pod \"redhat-marketplace-q5qf5\" (UID: \"21e70344-efa0-4c1b-a490-a52a1ffcdd13\") " pod="openshift-marketplace/redhat-marketplace-q5qf5" Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.421538 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/21e70344-efa0-4c1b-a490-a52a1ffcdd13-utilities\") pod \"redhat-marketplace-q5qf5\" (UID: \"21e70344-efa0-4c1b-a490-a52a1ffcdd13\") " pod="openshift-marketplace/redhat-marketplace-q5qf5" Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.423706 4576 generic.go:334] "Generic (PLEG): container finished" podID="558ce81e-ebdc-4b7d-bd04-a3b485857825" containerID="4195812fd7361a87ded0de2f26e05f434457e5359039f2b6a765c4925d7649ec" exitCode=0 Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.423780 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hmmm5" event={"ID":"558ce81e-ebdc-4b7d-bd04-a3b485857825","Type":"ContainerDied","Data":"4195812fd7361a87ded0de2f26e05f434457e5359039f2b6a765c4925d7649ec"} Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.423804 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hmmm5" event={"ID":"558ce81e-ebdc-4b7d-bd04-a3b485857825","Type":"ContainerStarted","Data":"3ecdfa377a57f882efe8b716f120bff3376709ebe14fb7d0053405ad27e3b697"} Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.448810 4576 patch_prober.go:28] interesting pod/router-default-5444994796-glrgq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 08:42:38 crc kubenswrapper[4576]: [-]has-synced failed: reason withheld Dec 03 08:42:38 crc kubenswrapper[4576]: [+]process-running ok Dec 03 08:42:38 crc kubenswrapper[4576]: healthz check failed Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.448862 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-glrgq" podUID="74ee01ca-ad15-4a8f-8c72-0daf093db217" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.457583 4576 generic.go:334] "Generic (PLEG): container finished" podID="b040fd94-da16-41e4-8fdf-66b7a8e05d87" containerID="f79586934c1abd195376e844447549c7a1844f8fd29abb9857809ed353f3b1f0" exitCode=0 Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.457746 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gmpkg" event={"ID":"b040fd94-da16-41e4-8fdf-66b7a8e05d87","Type":"ContainerDied","Data":"f79586934c1abd195376e844447549c7a1844f8fd29abb9857809ed353f3b1f0"} Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.457786 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-gmpkg" event={"ID":"b040fd94-da16-41e4-8fdf-66b7a8e05d87","Type":"ContainerStarted","Data":"c0608a5294a516da817a3f1587a29083e6f231aa4391f16467fc7b9c32863ae4"} Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.484652 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4zqw\" (UniqueName: \"kubernetes.io/projected/21e70344-efa0-4c1b-a490-a52a1ffcdd13-kube-api-access-c4zqw\") pod \"redhat-marketplace-q5qf5\" (UID: \"21e70344-efa0-4c1b-a490-a52a1ffcdd13\") " pod="openshift-marketplace/redhat-marketplace-q5qf5" Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.486462 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=3.486440618 podStartE2EDuration="3.486440618s" podCreationTimestamp="2025-12-03 08:42:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:38.484363312 +0000 UTC m=+165.870340296" watchObservedRunningTime="2025-12-03 08:42:38.486440618 +0000 UTC m=+165.872417602" Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.544250 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-9xtqh" event={"ID":"825049cb-3413-4d9b-81bb-7210dc51a05f","Type":"ContainerStarted","Data":"8c21c88efff9fc4d6556098123cfd3677e6fe5d9c19962ef8353d419d2a8ec25"} Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.617187 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-9xtqh" podStartSLOduration=14.617167369 podStartE2EDuration="14.617167369s" podCreationTimestamp="2025-12-03 08:42:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:38.615002592 +0000 UTC m=+166.000979606" watchObservedRunningTime="2025-12-03 08:42:38.617167369 +0000 UTC m=+166.003144353" Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.644004 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q5qf5" Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.662835 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lcs7f"] Dec 03 08:42:38 crc kubenswrapper[4576]: W1203 08:42:38.729915 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb420867b_5c27_4b98_a2ab_0dd31175f5c0.slice/crio-ed60e516e9b3106b79aad8e7a8553d841d64085db01212e46ca27e1134ea6f35 WatchSource:0}: Error finding container ed60e516e9b3106b79aad8e7a8553d841d64085db01212e46ca27e1134ea6f35: Status 404 returned error can't find the container with id ed60e516e9b3106b79aad8e7a8553d841d64085db01212e46ca27e1134ea6f35 Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.798571 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-2ngh2"] Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.799506 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2ngh2" Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.811373 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.814418 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2ngh2"] Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.938726 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/701bd1ac-86c3-48f7-b195-490b5e187f00-catalog-content\") pod \"redhat-operators-2ngh2\" (UID: \"701bd1ac-86c3-48f7-b195-490b5e187f00\") " pod="openshift-marketplace/redhat-operators-2ngh2" Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.938801 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/701bd1ac-86c3-48f7-b195-490b5e187f00-utilities\") pod \"redhat-operators-2ngh2\" (UID: \"701bd1ac-86c3-48f7-b195-490b5e187f00\") " pod="openshift-marketplace/redhat-operators-2ngh2" Dec 03 08:42:38 crc kubenswrapper[4576]: I1203 08:42:38.939634 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fxwd6\" (UniqueName: \"kubernetes.io/projected/701bd1ac-86c3-48f7-b195-490b5e187f00-kube-api-access-fxwd6\") pod \"redhat-operators-2ngh2\" (UID: \"701bd1ac-86c3-48f7-b195-490b5e187f00\") " pod="openshift-marketplace/redhat-operators-2ngh2" Dec 03 08:42:39 crc kubenswrapper[4576]: I1203 08:42:39.002407 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-hg98m"] Dec 03 08:42:39 crc kubenswrapper[4576]: I1203 08:42:39.040424 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/701bd1ac-86c3-48f7-b195-490b5e187f00-catalog-content\") pod \"redhat-operators-2ngh2\" (UID: \"701bd1ac-86c3-48f7-b195-490b5e187f00\") " pod="openshift-marketplace/redhat-operators-2ngh2" Dec 03 08:42:39 crc kubenswrapper[4576]: I1203 08:42:39.040777 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/701bd1ac-86c3-48f7-b195-490b5e187f00-utilities\") pod \"redhat-operators-2ngh2\" (UID: \"701bd1ac-86c3-48f7-b195-490b5e187f00\") " pod="openshift-marketplace/redhat-operators-2ngh2" Dec 03 08:42:39 crc kubenswrapper[4576]: I1203 08:42:39.040804 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxwd6\" (UniqueName: \"kubernetes.io/projected/701bd1ac-86c3-48f7-b195-490b5e187f00-kube-api-access-fxwd6\") pod \"redhat-operators-2ngh2\" (UID: \"701bd1ac-86c3-48f7-b195-490b5e187f00\") " pod="openshift-marketplace/redhat-operators-2ngh2" Dec 03 08:42:39 crc kubenswrapper[4576]: I1203 08:42:39.040889 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/701bd1ac-86c3-48f7-b195-490b5e187f00-catalog-content\") pod \"redhat-operators-2ngh2\" (UID: \"701bd1ac-86c3-48f7-b195-490b5e187f00\") " pod="openshift-marketplace/redhat-operators-2ngh2" Dec 03 08:42:39 crc kubenswrapper[4576]: I1203 08:42:39.041160 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" 
(UniqueName: \"kubernetes.io/empty-dir/701bd1ac-86c3-48f7-b195-490b5e187f00-utilities\") pod \"redhat-operators-2ngh2\" (UID: \"701bd1ac-86c3-48f7-b195-490b5e187f00\") " pod="openshift-marketplace/redhat-operators-2ngh2" Dec 03 08:42:39 crc kubenswrapper[4576]: I1203 08:42:39.064438 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fxwd6\" (UniqueName: \"kubernetes.io/projected/701bd1ac-86c3-48f7-b195-490b5e187f00-kube-api-access-fxwd6\") pod \"redhat-operators-2ngh2\" (UID: \"701bd1ac-86c3-48f7-b195-490b5e187f00\") " pod="openshift-marketplace/redhat-operators-2ngh2" Dec 03 08:42:39 crc kubenswrapper[4576]: I1203 08:42:39.082952 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-q5qf5"] Dec 03 08:42:39 crc kubenswrapper[4576]: W1203 08:42:39.092426 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod21e70344_efa0_4c1b_a490_a52a1ffcdd13.slice/crio-2ff9523a9258ba380e3eeb8abd4fbbf2bc6d9dcb446c4bb16fea3af49711b6a0 WatchSource:0}: Error finding container 2ff9523a9258ba380e3eeb8abd4fbbf2bc6d9dcb446c4bb16fea3af49711b6a0: Status 404 returned error can't find the container with id 2ff9523a9258ba380e3eeb8abd4fbbf2bc6d9dcb446c4bb16fea3af49711b6a0 Dec 03 08:42:39 crc kubenswrapper[4576]: I1203 08:42:39.129421 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2ngh2" Dec 03 08:42:39 crc kubenswrapper[4576]: I1203 08:42:39.180904 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-66ngh"] Dec 03 08:42:39 crc kubenswrapper[4576]: I1203 08:42:39.181917 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-66ngh" Dec 03 08:42:39 crc kubenswrapper[4576]: I1203 08:42:39.203580 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-66ngh"] Dec 03 08:42:39 crc kubenswrapper[4576]: I1203 08:42:39.217021 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:42:39 crc kubenswrapper[4576]: I1203 08:42:39.244815 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e48f6c7a-7a1c-41aa-8f03-93df84b64d03-utilities\") pod \"redhat-operators-66ngh\" (UID: \"e48f6c7a-7a1c-41aa-8f03-93df84b64d03\") " pod="openshift-marketplace/redhat-operators-66ngh" Dec 03 08:42:39 crc kubenswrapper[4576]: I1203 08:42:39.244867 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e48f6c7a-7a1c-41aa-8f03-93df84b64d03-catalog-content\") pod \"redhat-operators-66ngh\" (UID: \"e48f6c7a-7a1c-41aa-8f03-93df84b64d03\") " pod="openshift-marketplace/redhat-operators-66ngh" Dec 03 08:42:39 crc kubenswrapper[4576]: I1203 08:42:39.244904 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ksmhr\" (UniqueName: \"kubernetes.io/projected/e48f6c7a-7a1c-41aa-8f03-93df84b64d03-kube-api-access-ksmhr\") pod \"redhat-operators-66ngh\" (UID: \"e48f6c7a-7a1c-41aa-8f03-93df84b64d03\") " pod="openshift-marketplace/redhat-operators-66ngh" Dec 03 08:42:39 crc kubenswrapper[4576]: I1203 08:42:39.345732 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ksmhr\" (UniqueName: \"kubernetes.io/projected/e48f6c7a-7a1c-41aa-8f03-93df84b64d03-kube-api-access-ksmhr\") pod \"redhat-operators-66ngh\" (UID: \"e48f6c7a-7a1c-41aa-8f03-93df84b64d03\") " pod="openshift-marketplace/redhat-operators-66ngh" Dec 03 08:42:39 crc kubenswrapper[4576]: I1203 08:42:39.346156 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e48f6c7a-7a1c-41aa-8f03-93df84b64d03-utilities\") pod \"redhat-operators-66ngh\" (UID: \"e48f6c7a-7a1c-41aa-8f03-93df84b64d03\") " pod="openshift-marketplace/redhat-operators-66ngh" Dec 03 08:42:39 crc kubenswrapper[4576]: I1203 08:42:39.346198 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e48f6c7a-7a1c-41aa-8f03-93df84b64d03-catalog-content\") pod \"redhat-operators-66ngh\" (UID: \"e48f6c7a-7a1c-41aa-8f03-93df84b64d03\") " pod="openshift-marketplace/redhat-operators-66ngh" Dec 03 08:42:39 crc kubenswrapper[4576]: I1203 08:42:39.347089 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e48f6c7a-7a1c-41aa-8f03-93df84b64d03-catalog-content\") pod \"redhat-operators-66ngh\" (UID: \"e48f6c7a-7a1c-41aa-8f03-93df84b64d03\") " pod="openshift-marketplace/redhat-operators-66ngh" Dec 03 08:42:39 crc kubenswrapper[4576]: I1203 08:42:39.347247 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e48f6c7a-7a1c-41aa-8f03-93df84b64d03-utilities\") pod \"redhat-operators-66ngh\" (UID: \"e48f6c7a-7a1c-41aa-8f03-93df84b64d03\") " 
pod="openshift-marketplace/redhat-operators-66ngh" Dec 03 08:42:39 crc kubenswrapper[4576]: I1203 08:42:39.393716 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ksmhr\" (UniqueName: \"kubernetes.io/projected/e48f6c7a-7a1c-41aa-8f03-93df84b64d03-kube-api-access-ksmhr\") pod \"redhat-operators-66ngh\" (UID: \"e48f6c7a-7a1c-41aa-8f03-93df84b64d03\") " pod="openshift-marketplace/redhat-operators-66ngh" Dec 03 08:42:39 crc kubenswrapper[4576]: I1203 08:42:39.415105 4576 patch_prober.go:28] interesting pod/router-default-5444994796-glrgq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 08:42:39 crc kubenswrapper[4576]: [-]has-synced failed: reason withheld Dec 03 08:42:39 crc kubenswrapper[4576]: [+]process-running ok Dec 03 08:42:39 crc kubenswrapper[4576]: healthz check failed Dec 03 08:42:39 crc kubenswrapper[4576]: I1203 08:42:39.415214 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-glrgq" podUID="74ee01ca-ad15-4a8f-8c72-0daf093db217" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 08:42:39 crc kubenswrapper[4576]: I1203 08:42:39.607100 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-66ngh" Dec 03 08:42:39 crc kubenswrapper[4576]: I1203 08:42:39.675374 4576 generic.go:334] "Generic (PLEG): container finished" podID="21e70344-efa0-4c1b-a490-a52a1ffcdd13" containerID="c1a0d473ea305aa09ae8bb7de6e8b2a02947bfd732af990b453fcb25e72102ab" exitCode=0 Dec 03 08:42:39 crc kubenswrapper[4576]: I1203 08:42:39.830910 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:42:39 crc kubenswrapper[4576]: I1203 08:42:39.830973 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:42:39 crc kubenswrapper[4576]: I1203 08:42:39.836197 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c87be72e-a53b-42c9-bb32-f56cd0febe24-metrics-certs\") pod \"network-metrics-daemon-pw7pk\" (UID: \"c87be72e-a53b-42c9-bb32-f56cd0febe24\") " pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:42:39 crc kubenswrapper[4576]: I1203 08:42:39.855042 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c87be72e-a53b-42c9-bb32-f56cd0febe24-metrics-certs\") pod \"network-metrics-daemon-pw7pk\" (UID: \"c87be72e-a53b-42c9-bb32-f56cd0febe24\") " pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:42:39 crc kubenswrapper[4576]: I1203 08:42:39.855599 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q5qf5" 
event={"ID":"21e70344-efa0-4c1b-a490-a52a1ffcdd13","Type":"ContainerDied","Data":"c1a0d473ea305aa09ae8bb7de6e8b2a02947bfd732af990b453fcb25e72102ab"} Dec 03 08:42:39 crc kubenswrapper[4576]: I1203 08:42:39.855655 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q5qf5" event={"ID":"21e70344-efa0-4c1b-a490-a52a1ffcdd13","Type":"ContainerStarted","Data":"2ff9523a9258ba380e3eeb8abd4fbbf2bc6d9dcb446c4bb16fea3af49711b6a0"} Dec 03 08:42:39 crc kubenswrapper[4576]: I1203 08:42:39.855681 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 03 08:42:39 crc kubenswrapper[4576]: I1203 08:42:39.857209 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 08:42:39 crc kubenswrapper[4576]: I1203 08:42:39.911913 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 03 08:42:39 crc kubenswrapper[4576]: I1203 08:42:39.912241 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 03 08:42:39 crc kubenswrapper[4576]: I1203 08:42:39.915209 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pw7pk" Dec 03 08:42:39 crc kubenswrapper[4576]: I1203 08:42:39.949296 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a47adf5b-ca22-4605-9812-a159b2cf6670-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"a47adf5b-ca22-4605-9812-a159b2cf6670\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 08:42:39 crc kubenswrapper[4576]: I1203 08:42:39.949380 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a47adf5b-ca22-4605-9812-a159b2cf6670-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"a47adf5b-ca22-4605-9812-a159b2cf6670\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 08:42:39 crc kubenswrapper[4576]: I1203 08:42:39.960094 4576 generic.go:334] "Generic (PLEG): container finished" podID="1096b16a-a819-479d-9739-12747402df0c" containerID="a3fc8335bd160d1186d09ca2b8b9c3b7d38ec17755e8ceabe25c50d1022157b2" exitCode=0 Dec 03 08:42:39 crc kubenswrapper[4576]: I1203 08:42:39.979834 4576 generic.go:334] "Generic (PLEG): container finished" podID="b420867b-5c27-4b98-a2ab-0dd31175f5c0" containerID="a4189fe76020d6fdfdf95244ddbbc7f4284b5a5e5a7d064f5a1920529caf751b" exitCode=0 Dec 03 08:42:40 crc kubenswrapper[4576]: I1203 08:42:40.045317 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" podStartSLOduration=145.045296919 podStartE2EDuration="2m25.045296919s" podCreationTimestamp="2025-12-03 08:40:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:40.038952639 +0000 UTC m=+167.424929653" watchObservedRunningTime="2025-12-03 08:42:40.045296919 +0000 UTC m=+167.431273903" Dec 03 08:42:40 crc kubenswrapper[4576]: I1203 08:42:40.050401 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: 
\"kubernetes.io/host-path/a47adf5b-ca22-4605-9812-a159b2cf6670-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"a47adf5b-ca22-4605-9812-a159b2cf6670\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 08:42:40 crc kubenswrapper[4576]: I1203 08:42:40.050806 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a47adf5b-ca22-4605-9812-a159b2cf6670-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"a47adf5b-ca22-4605-9812-a159b2cf6670\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 08:42:40 crc kubenswrapper[4576]: I1203 08:42:40.052924 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a47adf5b-ca22-4605-9812-a159b2cf6670-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"a47adf5b-ca22-4605-9812-a159b2cf6670\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 08:42:40 crc kubenswrapper[4576]: I1203 08:42:40.076155 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a47adf5b-ca22-4605-9812-a159b2cf6670-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"a47adf5b-ca22-4605-9812-a159b2cf6670\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 08:42:40 crc kubenswrapper[4576]: I1203 08:42:40.103800 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:42:40 crc kubenswrapper[4576]: I1203 08:42:40.103840 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" event={"ID":"d9c7dbff-42f0-43fe-bf81-e539cb523a77","Type":"ContainerStarted","Data":"da871a4fa637d0fc6ee6415a0e3c6f0737bd0afca404faf19d4f6c8adfcac12d"} Dec 03 08:42:40 crc kubenswrapper[4576]: I1203 08:42:40.103865 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 03 08:42:40 crc kubenswrapper[4576]: I1203 08:42:40.103883 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" event={"ID":"d9c7dbff-42f0-43fe-bf81-e539cb523a77","Type":"ContainerStarted","Data":"1aeb0e0415c9363c96efae1056c6831602c8f1fdb04af4b33dfdfc17f1399ec1"} Dec 03 08:42:40 crc kubenswrapper[4576]: I1203 08:42:40.103906 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"1096b16a-a819-479d-9739-12747402df0c","Type":"ContainerDied","Data":"a3fc8335bd160d1186d09ca2b8b9c3b7d38ec17755e8ceabe25c50d1022157b2"} Dec 03 08:42:40 crc kubenswrapper[4576]: I1203 08:42:40.103922 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lcs7f" event={"ID":"b420867b-5c27-4b98-a2ab-0dd31175f5c0","Type":"ContainerDied","Data":"a4189fe76020d6fdfdf95244ddbbc7f4284b5a5e5a7d064f5a1920529caf751b"} Dec 03 08:42:40 crc kubenswrapper[4576]: I1203 08:42:40.103932 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lcs7f" event={"ID":"b420867b-5c27-4b98-a2ab-0dd31175f5c0","Type":"ContainerStarted","Data":"ed60e516e9b3106b79aad8e7a8553d841d64085db01212e46ca27e1134ea6f35"} Dec 03 08:42:40 crc kubenswrapper[4576]: I1203 08:42:40.103943 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2ngh2"] Dec 03 08:42:40 
crc kubenswrapper[4576]: I1203 08:42:40.291243 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 08:42:40 crc kubenswrapper[4576]: I1203 08:42:40.413869 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-66ngh"] Dec 03 08:42:40 crc kubenswrapper[4576]: I1203 08:42:40.414107 4576 patch_prober.go:28] interesting pod/router-default-5444994796-glrgq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 08:42:40 crc kubenswrapper[4576]: [-]has-synced failed: reason withheld Dec 03 08:42:40 crc kubenswrapper[4576]: [+]process-running ok Dec 03 08:42:40 crc kubenswrapper[4576]: healthz check failed Dec 03 08:42:40 crc kubenswrapper[4576]: I1203 08:42:40.414140 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-glrgq" podUID="74ee01ca-ad15-4a8f-8c72-0daf093db217" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 08:42:40 crc kubenswrapper[4576]: W1203 08:42:40.440003 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode48f6c7a_7a1c_41aa_8f03_93df84b64d03.slice/crio-66ab2e1e70b6d38b4c8957cbc33f23ceed10f6ff3bbe7870177fc5fda13680ee WatchSource:0}: Error finding container 66ab2e1e70b6d38b4c8957cbc33f23ceed10f6ff3bbe7870177fc5fda13680ee: Status 404 returned error can't find the container with id 66ab2e1e70b6d38b4c8957cbc33f23ceed10f6ff3bbe7870177fc5fda13680ee Dec 03 08:42:40 crc kubenswrapper[4576]: I1203 08:42:40.553048 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-pw7pk"] Dec 03 08:42:40 crc kubenswrapper[4576]: W1203 08:42:40.655812 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc87be72e_a53b_42c9_bb32_f56cd0febe24.slice/crio-baa50bc1b20f15586a3de7be758f1829ff8712f301104b240cb365bba854b7ce WatchSource:0}: Error finding container baa50bc1b20f15586a3de7be758f1829ff8712f301104b240cb365bba854b7ce: Status 404 returned error can't find the container with id baa50bc1b20f15586a3de7be758f1829ff8712f301104b240cb365bba854b7ce Dec 03 08:42:40 crc kubenswrapper[4576]: I1203 08:42:40.715832 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-t7p55" Dec 03 08:42:40 crc kubenswrapper[4576]: I1203 08:42:40.721053 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-t7p55" Dec 03 08:42:40 crc kubenswrapper[4576]: I1203 08:42:40.884982 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 03 08:42:41 crc kubenswrapper[4576]: I1203 08:42:41.085899 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-66ngh" event={"ID":"e48f6c7a-7a1c-41aa-8f03-93df84b64d03","Type":"ContainerStarted","Data":"66ab2e1e70b6d38b4c8957cbc33f23ceed10f6ff3bbe7870177fc5fda13680ee"} Dec 03 08:42:41 crc kubenswrapper[4576]: I1203 08:42:41.107918 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" 
event={"ID":"a47adf5b-ca22-4605-9812-a159b2cf6670","Type":"ContainerStarted","Data":"3dc3201cbf48a07e595ef45f5f2d736f0653228681aa91c6b1d3bd04d3ce2a68"} Dec 03 08:42:41 crc kubenswrapper[4576]: I1203 08:42:41.122697 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-pw7pk" event={"ID":"c87be72e-a53b-42c9-bb32-f56cd0febe24","Type":"ContainerStarted","Data":"baa50bc1b20f15586a3de7be758f1829ff8712f301104b240cb365bba854b7ce"} Dec 03 08:42:41 crc kubenswrapper[4576]: I1203 08:42:41.129383 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2ngh2" event={"ID":"701bd1ac-86c3-48f7-b195-490b5e187f00","Type":"ContainerStarted","Data":"8a42f3c93dab6f4025b6d4aa8c1cc5ef143652c7d2bc0645f9577cb372331976"} Dec 03 08:42:41 crc kubenswrapper[4576]: I1203 08:42:41.440775 4576 patch_prober.go:28] interesting pod/router-default-5444994796-glrgq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 08:42:41 crc kubenswrapper[4576]: [-]has-synced failed: reason withheld Dec 03 08:42:41 crc kubenswrapper[4576]: [+]process-running ok Dec 03 08:42:41 crc kubenswrapper[4576]: healthz check failed Dec 03 08:42:41 crc kubenswrapper[4576]: I1203 08:42:41.440851 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-glrgq" podUID="74ee01ca-ad15-4a8f-8c72-0daf093db217" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 08:42:42 crc kubenswrapper[4576]: I1203 08:42:42.079023 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 08:42:42 crc kubenswrapper[4576]: I1203 08:42:42.172081 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1096b16a-a819-479d-9739-12747402df0c-kube-api-access\") pod \"1096b16a-a819-479d-9739-12747402df0c\" (UID: \"1096b16a-a819-479d-9739-12747402df0c\") " Dec 03 08:42:42 crc kubenswrapper[4576]: I1203 08:42:42.172214 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1096b16a-a819-479d-9739-12747402df0c-kubelet-dir\") pod \"1096b16a-a819-479d-9739-12747402df0c\" (UID: \"1096b16a-a819-479d-9739-12747402df0c\") " Dec 03 08:42:42 crc kubenswrapper[4576]: I1203 08:42:42.172891 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1096b16a-a819-479d-9739-12747402df0c-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "1096b16a-a819-479d-9739-12747402df0c" (UID: "1096b16a-a819-479d-9739-12747402df0c"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 08:42:42 crc kubenswrapper[4576]: I1203 08:42:42.190779 4576 generic.go:334] "Generic (PLEG): container finished" podID="e48f6c7a-7a1c-41aa-8f03-93df84b64d03" containerID="f81c88532dd6c1bb445430ac34512721d019d018c2e5953c42a012cb54e3e663" exitCode=0 Dec 03 08:42:42 crc kubenswrapper[4576]: I1203 08:42:42.190878 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-66ngh" event={"ID":"e48f6c7a-7a1c-41aa-8f03-93df84b64d03","Type":"ContainerDied","Data":"f81c88532dd6c1bb445430ac34512721d019d018c2e5953c42a012cb54e3e663"} Dec 03 08:42:42 crc kubenswrapper[4576]: I1203 08:42:42.198221 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"a47adf5b-ca22-4605-9812-a159b2cf6670","Type":"ContainerStarted","Data":"3ac02a40581be58b7de0f911bc52366fbf3ed4fa0df508731aba9bf6405e5d3e"} Dec 03 08:42:42 crc kubenswrapper[4576]: I1203 08:42:42.206330 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-pw7pk" event={"ID":"c87be72e-a53b-42c9-bb32-f56cd0febe24","Type":"ContainerStarted","Data":"96bd51b0d38c1a7ac31c62c904583eaa0f8ea8d8080616b2e1ab80c6a0934386"} Dec 03 08:42:42 crc kubenswrapper[4576]: I1203 08:42:42.207865 4576 generic.go:334] "Generic (PLEG): container finished" podID="701bd1ac-86c3-48f7-b195-490b5e187f00" containerID="801fecd290425b9c26dc28a1146560999d522975623f6194d4d5f22a7301f2e0" exitCode=0 Dec 03 08:42:42 crc kubenswrapper[4576]: I1203 08:42:42.207919 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2ngh2" event={"ID":"701bd1ac-86c3-48f7-b195-490b5e187f00","Type":"ContainerDied","Data":"801fecd290425b9c26dc28a1146560999d522975623f6194d4d5f22a7301f2e0"} Dec 03 08:42:42 crc kubenswrapper[4576]: I1203 08:42:42.213154 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1096b16a-a819-479d-9739-12747402df0c-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1096b16a-a819-479d-9739-12747402df0c" (UID: "1096b16a-a819-479d-9739-12747402df0c"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:42:42 crc kubenswrapper[4576]: I1203 08:42:42.222614 4576 generic.go:334] "Generic (PLEG): container finished" podID="70305d8f-8bdf-437b-ba4d-aaadc408205d" containerID="7427416535c820c63b653125794914811da6781790bd7deb7d82db900b897430" exitCode=0 Dec 03 08:42:42 crc kubenswrapper[4576]: I1203 08:42:42.222683 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412510-jwjq6" event={"ID":"70305d8f-8bdf-437b-ba4d-aaadc408205d","Type":"ContainerDied","Data":"7427416535c820c63b653125794914811da6781790bd7deb7d82db900b897430"} Dec 03 08:42:42 crc kubenswrapper[4576]: I1203 08:42:42.238686 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"1096b16a-a819-479d-9739-12747402df0c","Type":"ContainerDied","Data":"8ef46eacd9491b35953458df2f41ac662c60efadb2c03dcb8eb3d9808a3c8b43"} Dec 03 08:42:42 crc kubenswrapper[4576]: I1203 08:42:42.238747 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8ef46eacd9491b35953458df2f41ac662c60efadb2c03dcb8eb3d9808a3c8b43" Dec 03 08:42:42 crc kubenswrapper[4576]: I1203 08:42:42.238826 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 08:42:42 crc kubenswrapper[4576]: I1203 08:42:42.273602 4576 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1096b16a-a819-479d-9739-12747402df0c-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 03 08:42:42 crc kubenswrapper[4576]: I1203 08:42:42.273632 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1096b16a-a819-479d-9739-12747402df0c-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 08:42:42 crc kubenswrapper[4576]: I1203 08:42:42.402278 4576 patch_prober.go:28] interesting pod/router-default-5444994796-glrgq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 08:42:42 crc kubenswrapper[4576]: [-]has-synced failed: reason withheld Dec 03 08:42:42 crc kubenswrapper[4576]: [+]process-running ok Dec 03 08:42:42 crc kubenswrapper[4576]: healthz check failed Dec 03 08:42:42 crc kubenswrapper[4576]: I1203 08:42:42.402428 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-glrgq" podUID="74ee01ca-ad15-4a8f-8c72-0daf093db217" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 08:42:42 crc kubenswrapper[4576]: I1203 08:42:42.558653 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-brdzq" Dec 03 08:42:43 crc kubenswrapper[4576]: I1203 08:42:43.381630 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-pw7pk" event={"ID":"c87be72e-a53b-42c9-bb32-f56cd0febe24","Type":"ContainerStarted","Data":"9415fc6e8046562fc7625b8d8bab68dcbd8256c36f01261317fde6e2eebae721"} Dec 03 08:42:43 crc kubenswrapper[4576]: I1203 08:42:43.406839 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-pw7pk" podStartSLOduration=148.406822649 podStartE2EDuration="2m28.406822649s" 
podCreationTimestamp="2025-12-03 08:40:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:42:43.406548042 +0000 UTC m=+170.792525036" watchObservedRunningTime="2025-12-03 08:42:43.406822649 +0000 UTC m=+170.792799633" Dec 03 08:42:43 crc kubenswrapper[4576]: I1203 08:42:43.418186 4576 patch_prober.go:28] interesting pod/router-default-5444994796-glrgq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 08:42:43 crc kubenswrapper[4576]: [-]has-synced failed: reason withheld Dec 03 08:42:43 crc kubenswrapper[4576]: [+]process-running ok Dec 03 08:42:43 crc kubenswrapper[4576]: healthz check failed Dec 03 08:42:43 crc kubenswrapper[4576]: I1203 08:42:43.418254 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-glrgq" podUID="74ee01ca-ad15-4a8f-8c72-0daf093db217" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 08:42:44 crc kubenswrapper[4576]: I1203 08:42:44.406197 4576 patch_prober.go:28] interesting pod/router-default-5444994796-glrgq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 08:42:44 crc kubenswrapper[4576]: [-]has-synced failed: reason withheld Dec 03 08:42:44 crc kubenswrapper[4576]: [+]process-running ok Dec 03 08:42:44 crc kubenswrapper[4576]: healthz check failed Dec 03 08:42:44 crc kubenswrapper[4576]: I1203 08:42:44.406473 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-glrgq" podUID="74ee01ca-ad15-4a8f-8c72-0daf093db217" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 08:42:44 crc kubenswrapper[4576]: I1203 08:42:44.417452 4576 generic.go:334] "Generic (PLEG): container finished" podID="a47adf5b-ca22-4605-9812-a159b2cf6670" containerID="3ac02a40581be58b7de0f911bc52366fbf3ed4fa0df508731aba9bf6405e5d3e" exitCode=0 Dec 03 08:42:44 crc kubenswrapper[4576]: I1203 08:42:44.417555 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"a47adf5b-ca22-4605-9812-a159b2cf6670","Type":"ContainerDied","Data":"3ac02a40581be58b7de0f911bc52366fbf3ed4fa0df508731aba9bf6405e5d3e"} Dec 03 08:42:44 crc kubenswrapper[4576]: I1203 08:42:44.631100 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412510-jwjq6" Dec 03 08:42:44 crc kubenswrapper[4576]: I1203 08:42:44.659647 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kt85t\" (UniqueName: \"kubernetes.io/projected/70305d8f-8bdf-437b-ba4d-aaadc408205d-kube-api-access-kt85t\") pod \"70305d8f-8bdf-437b-ba4d-aaadc408205d\" (UID: \"70305d8f-8bdf-437b-ba4d-aaadc408205d\") " Dec 03 08:42:44 crc kubenswrapper[4576]: I1203 08:42:44.659713 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/70305d8f-8bdf-437b-ba4d-aaadc408205d-secret-volume\") pod \"70305d8f-8bdf-437b-ba4d-aaadc408205d\" (UID: \"70305d8f-8bdf-437b-ba4d-aaadc408205d\") " Dec 03 08:42:44 crc kubenswrapper[4576]: I1203 08:42:44.659897 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/70305d8f-8bdf-437b-ba4d-aaadc408205d-config-volume\") pod \"70305d8f-8bdf-437b-ba4d-aaadc408205d\" (UID: \"70305d8f-8bdf-437b-ba4d-aaadc408205d\") " Dec 03 08:42:44 crc kubenswrapper[4576]: I1203 08:42:44.660936 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/70305d8f-8bdf-437b-ba4d-aaadc408205d-config-volume" (OuterVolumeSpecName: "config-volume") pod "70305d8f-8bdf-437b-ba4d-aaadc408205d" (UID: "70305d8f-8bdf-437b-ba4d-aaadc408205d"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:42:44 crc kubenswrapper[4576]: I1203 08:42:44.717237 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70305d8f-8bdf-437b-ba4d-aaadc408205d-kube-api-access-kt85t" (OuterVolumeSpecName: "kube-api-access-kt85t") pod "70305d8f-8bdf-437b-ba4d-aaadc408205d" (UID: "70305d8f-8bdf-437b-ba4d-aaadc408205d"). InnerVolumeSpecName "kube-api-access-kt85t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:42:44 crc kubenswrapper[4576]: I1203 08:42:44.722319 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70305d8f-8bdf-437b-ba4d-aaadc408205d-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "70305d8f-8bdf-437b-ba4d-aaadc408205d" (UID: "70305d8f-8bdf-437b-ba4d-aaadc408205d"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:42:44 crc kubenswrapper[4576]: I1203 08:42:44.761274 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kt85t\" (UniqueName: \"kubernetes.io/projected/70305d8f-8bdf-437b-ba4d-aaadc408205d-kube-api-access-kt85t\") on node \"crc\" DevicePath \"\"" Dec 03 08:42:44 crc kubenswrapper[4576]: I1203 08:42:44.761311 4576 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/70305d8f-8bdf-437b-ba4d-aaadc408205d-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 08:42:44 crc kubenswrapper[4576]: I1203 08:42:44.761324 4576 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/70305d8f-8bdf-437b-ba4d-aaadc408205d-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 08:42:45 crc kubenswrapper[4576]: I1203 08:42:45.405396 4576 patch_prober.go:28] interesting pod/router-default-5444994796-glrgq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 08:42:45 crc kubenswrapper[4576]: [-]has-synced failed: reason withheld Dec 03 08:42:45 crc kubenswrapper[4576]: [+]process-running ok Dec 03 08:42:45 crc kubenswrapper[4576]: healthz check failed Dec 03 08:42:45 crc kubenswrapper[4576]: I1203 08:42:45.405704 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-glrgq" podUID="74ee01ca-ad15-4a8f-8c72-0daf093db217" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 08:42:45 crc kubenswrapper[4576]: I1203 08:42:45.451600 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412510-jwjq6" event={"ID":"70305d8f-8bdf-437b-ba4d-aaadc408205d","Type":"ContainerDied","Data":"c0f9efeb14c4f6705b47d478fb0ebecd1dfdfe68ede752e91c389c27597773f4"} Dec 03 08:42:45 crc kubenswrapper[4576]: I1203 08:42:45.451666 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c0f9efeb14c4f6705b47d478fb0ebecd1dfdfe68ede752e91c389c27597773f4" Dec 03 08:42:45 crc kubenswrapper[4576]: I1203 08:42:45.451898 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412510-jwjq6" Dec 03 08:42:45 crc kubenswrapper[4576]: I1203 08:42:45.915783 4576 patch_prober.go:28] interesting pod/downloads-7954f5f757-l27nx container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Dec 03 08:42:45 crc kubenswrapper[4576]: I1203 08:42:45.915878 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-l27nx" podUID="f9d3808c-11ef-421d-83e6-b909679c5490" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Dec 03 08:42:45 crc kubenswrapper[4576]: I1203 08:42:45.921374 4576 patch_prober.go:28] interesting pod/downloads-7954f5f757-l27nx container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Dec 03 08:42:45 crc kubenswrapper[4576]: I1203 08:42:45.921444 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-l27nx" podUID="f9d3808c-11ef-421d-83e6-b909679c5490" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Dec 03 08:42:46 crc kubenswrapper[4576]: I1203 08:42:46.203188 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 08:42:46 crc kubenswrapper[4576]: I1203 08:42:46.304848 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a47adf5b-ca22-4605-9812-a159b2cf6670-kube-api-access\") pod \"a47adf5b-ca22-4605-9812-a159b2cf6670\" (UID: \"a47adf5b-ca22-4605-9812-a159b2cf6670\") " Dec 03 08:42:46 crc kubenswrapper[4576]: I1203 08:42:46.304897 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a47adf5b-ca22-4605-9812-a159b2cf6670-kubelet-dir\") pod \"a47adf5b-ca22-4605-9812-a159b2cf6670\" (UID: \"a47adf5b-ca22-4605-9812-a159b2cf6670\") " Dec 03 08:42:46 crc kubenswrapper[4576]: I1203 08:42:46.305218 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a47adf5b-ca22-4605-9812-a159b2cf6670-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "a47adf5b-ca22-4605-9812-a159b2cf6670" (UID: "a47adf5b-ca22-4605-9812-a159b2cf6670"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 08:42:46 crc kubenswrapper[4576]: I1203 08:42:46.333109 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a47adf5b-ca22-4605-9812-a159b2cf6670-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "a47adf5b-ca22-4605-9812-a159b2cf6670" (UID: "a47adf5b-ca22-4605-9812-a159b2cf6670"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:42:46 crc kubenswrapper[4576]: I1203 08:42:46.400363 4576 patch_prober.go:28] interesting pod/router-default-5444994796-glrgq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 08:42:46 crc kubenswrapper[4576]: [-]has-synced failed: reason withheld Dec 03 08:42:46 crc kubenswrapper[4576]: [+]process-running ok Dec 03 08:42:46 crc kubenswrapper[4576]: healthz check failed Dec 03 08:42:46 crc kubenswrapper[4576]: I1203 08:42:46.400660 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-glrgq" podUID="74ee01ca-ad15-4a8f-8c72-0daf093db217" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 08:42:46 crc kubenswrapper[4576]: I1203 08:42:46.413960 4576 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a47adf5b-ca22-4605-9812-a159b2cf6670-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 03 08:42:46 crc kubenswrapper[4576]: I1203 08:42:46.413997 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a47adf5b-ca22-4605-9812-a159b2cf6670-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 08:42:46 crc kubenswrapper[4576]: I1203 08:42:46.451016 4576 patch_prober.go:28] interesting pod/console-f9d7485db-h7ncw container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.20:8443/health\": dial tcp 10.217.0.20:8443: connect: connection refused" start-of-body= Dec 03 08:42:46 crc kubenswrapper[4576]: I1203 08:42:46.451079 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-h7ncw" podUID="3cb91673-2622-41a5-91d3-a79e0ba7289b" containerName="console" probeResult="failure" output="Get \"https://10.217.0.20:8443/health\": dial tcp 10.217.0.20:8443: connect: connection refused" Dec 03 08:42:46 crc kubenswrapper[4576]: I1203 08:42:46.483460 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"a47adf5b-ca22-4605-9812-a159b2cf6670","Type":"ContainerDied","Data":"3dc3201cbf48a07e595ef45f5f2d736f0653228681aa91c6b1d3bd04d3ce2a68"} Dec 03 08:42:46 crc kubenswrapper[4576]: I1203 08:42:46.483507 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3dc3201cbf48a07e595ef45f5f2d736f0653228681aa91c6b1d3bd04d3ce2a68" Dec 03 08:42:46 crc kubenswrapper[4576]: I1203 08:42:46.483589 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 08:42:47 crc kubenswrapper[4576]: I1203 08:42:47.402320 4576 patch_prober.go:28] interesting pod/router-default-5444994796-glrgq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 08:42:47 crc kubenswrapper[4576]: [-]has-synced failed: reason withheld Dec 03 08:42:47 crc kubenswrapper[4576]: [+]process-running ok Dec 03 08:42:47 crc kubenswrapper[4576]: healthz check failed Dec 03 08:42:47 crc kubenswrapper[4576]: I1203 08:42:47.403021 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-glrgq" podUID="74ee01ca-ad15-4a8f-8c72-0daf093db217" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 08:42:48 crc kubenswrapper[4576]: I1203 08:42:48.399953 4576 patch_prober.go:28] interesting pod/router-default-5444994796-glrgq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 08:42:48 crc kubenswrapper[4576]: [-]has-synced failed: reason withheld Dec 03 08:42:48 crc kubenswrapper[4576]: [+]process-running ok Dec 03 08:42:48 crc kubenswrapper[4576]: healthz check failed Dec 03 08:42:48 crc kubenswrapper[4576]: I1203 08:42:48.400017 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-glrgq" podUID="74ee01ca-ad15-4a8f-8c72-0daf093db217" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 08:42:49 crc kubenswrapper[4576]: I1203 08:42:49.416104 4576 patch_prober.go:28] interesting pod/router-default-5444994796-glrgq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 08:42:49 crc kubenswrapper[4576]: [-]has-synced failed: reason withheld Dec 03 08:42:49 crc kubenswrapper[4576]: [+]process-running ok Dec 03 08:42:49 crc kubenswrapper[4576]: healthz check failed Dec 03 08:42:49 crc kubenswrapper[4576]: I1203 08:42:49.416663 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-glrgq" podUID="74ee01ca-ad15-4a8f-8c72-0daf093db217" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 08:42:50 crc kubenswrapper[4576]: I1203 08:42:50.483761 4576 patch_prober.go:28] interesting pod/router-default-5444994796-glrgq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 08:42:50 crc kubenswrapper[4576]: [-]has-synced failed: reason withheld Dec 03 08:42:50 crc kubenswrapper[4576]: [+]process-running ok Dec 03 08:42:50 crc kubenswrapper[4576]: healthz check failed Dec 03 08:42:50 crc kubenswrapper[4576]: I1203 08:42:50.483832 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-glrgq" podUID="74ee01ca-ad15-4a8f-8c72-0daf093db217" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 08:42:51 crc kubenswrapper[4576]: I1203 08:42:51.404619 4576 patch_prober.go:28] interesting pod/router-default-5444994796-glrgq container/router 
namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 08:42:51 crc kubenswrapper[4576]: [+]has-synced ok Dec 03 08:42:51 crc kubenswrapper[4576]: [+]process-running ok Dec 03 08:42:51 crc kubenswrapper[4576]: healthz check failed Dec 03 08:42:51 crc kubenswrapper[4576]: I1203 08:42:51.404690 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-glrgq" podUID="74ee01ca-ad15-4a8f-8c72-0daf093db217" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 08:42:52 crc kubenswrapper[4576]: I1203 08:42:52.409868 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-glrgq" Dec 03 08:42:52 crc kubenswrapper[4576]: I1203 08:42:52.413627 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-glrgq" Dec 03 08:42:55 crc kubenswrapper[4576]: I1203 08:42:55.914921 4576 patch_prober.go:28] interesting pod/downloads-7954f5f757-l27nx container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Dec 03 08:42:55 crc kubenswrapper[4576]: I1203 08:42:55.915303 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-l27nx" podUID="f9d3808c-11ef-421d-83e6-b909679c5490" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Dec 03 08:42:55 crc kubenswrapper[4576]: I1203 08:42:55.915384 4576 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-console/downloads-7954f5f757-l27nx" Dec 03 08:42:55 crc kubenswrapper[4576]: I1203 08:42:55.916039 4576 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="download-server" containerStatusID={"Type":"cri-o","ID":"d5aa7b0f14591d541544d125355c26efbf5ed950d2c86096fbbf0b32dc5e8bb1"} pod="openshift-console/downloads-7954f5f757-l27nx" containerMessage="Container download-server failed liveness probe, will be restarted" Dec 03 08:42:55 crc kubenswrapper[4576]: I1203 08:42:55.916194 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/downloads-7954f5f757-l27nx" podUID="f9d3808c-11ef-421d-83e6-b909679c5490" containerName="download-server" containerID="cri-o://d5aa7b0f14591d541544d125355c26efbf5ed950d2c86096fbbf0b32dc5e8bb1" gracePeriod=2 Dec 03 08:42:55 crc kubenswrapper[4576]: I1203 08:42:55.917411 4576 patch_prober.go:28] interesting pod/downloads-7954f5f757-l27nx container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Dec 03 08:42:55 crc kubenswrapper[4576]: I1203 08:42:55.917461 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-l27nx" podUID="f9d3808c-11ef-421d-83e6-b909679c5490" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Dec 03 08:42:55 crc kubenswrapper[4576]: I1203 08:42:55.917696 4576 patch_prober.go:28] interesting pod/downloads-7954f5f757-l27nx 
container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Dec 03 08:42:55 crc kubenswrapper[4576]: I1203 08:42:55.917716 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-l27nx" podUID="f9d3808c-11ef-421d-83e6-b909679c5490" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Dec 03 08:42:56 crc kubenswrapper[4576]: I1203 08:42:56.446705 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-h7ncw" Dec 03 08:42:56 crc kubenswrapper[4576]: I1203 08:42:56.450710 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-h7ncw" Dec 03 08:42:56 crc kubenswrapper[4576]: I1203 08:42:56.898733 4576 generic.go:334] "Generic (PLEG): container finished" podID="f9d3808c-11ef-421d-83e6-b909679c5490" containerID="d5aa7b0f14591d541544d125355c26efbf5ed950d2c86096fbbf0b32dc5e8bb1" exitCode=0 Dec 03 08:42:56 crc kubenswrapper[4576]: I1203 08:42:56.898783 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-l27nx" event={"ID":"f9d3808c-11ef-421d-83e6-b909679c5490","Type":"ContainerDied","Data":"d5aa7b0f14591d541544d125355c26efbf5ed950d2c86096fbbf0b32dc5e8bb1"} Dec 03 08:42:58 crc kubenswrapper[4576]: I1203 08:42:58.361712 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:43:05 crc kubenswrapper[4576]: I1203 08:43:05.915827 4576 patch_prober.go:28] interesting pod/downloads-7954f5f757-l27nx container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Dec 03 08:43:05 crc kubenswrapper[4576]: I1203 08:43:05.916390 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-l27nx" podUID="f9d3808c-11ef-421d-83e6-b909679c5490" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Dec 03 08:43:07 crc kubenswrapper[4576]: I1203 08:43:07.379304 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xbbn2" Dec 03 08:43:09 crc kubenswrapper[4576]: I1203 08:43:09.680322 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:43:09 crc kubenswrapper[4576]: I1203 08:43:09.680987 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:43:13 crc kubenswrapper[4576]: I1203 08:43:13.490805 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 03 
08:43:13 crc kubenswrapper[4576]: E1203 08:43:13.491946 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1096b16a-a819-479d-9739-12747402df0c" containerName="pruner" Dec 03 08:43:13 crc kubenswrapper[4576]: I1203 08:43:13.491974 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="1096b16a-a819-479d-9739-12747402df0c" containerName="pruner" Dec 03 08:43:13 crc kubenswrapper[4576]: E1203 08:43:13.492009 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70305d8f-8bdf-437b-ba4d-aaadc408205d" containerName="collect-profiles" Dec 03 08:43:13 crc kubenswrapper[4576]: I1203 08:43:13.492022 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="70305d8f-8bdf-437b-ba4d-aaadc408205d" containerName="collect-profiles" Dec 03 08:43:13 crc kubenswrapper[4576]: E1203 08:43:13.492033 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a47adf5b-ca22-4605-9812-a159b2cf6670" containerName="pruner" Dec 03 08:43:13 crc kubenswrapper[4576]: I1203 08:43:13.492040 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="a47adf5b-ca22-4605-9812-a159b2cf6670" containerName="pruner" Dec 03 08:43:13 crc kubenswrapper[4576]: I1203 08:43:13.492284 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="a47adf5b-ca22-4605-9812-a159b2cf6670" containerName="pruner" Dec 03 08:43:13 crc kubenswrapper[4576]: I1203 08:43:13.492304 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="70305d8f-8bdf-437b-ba4d-aaadc408205d" containerName="collect-profiles" Dec 03 08:43:13 crc kubenswrapper[4576]: I1203 08:43:13.492317 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="1096b16a-a819-479d-9739-12747402df0c" containerName="pruner" Dec 03 08:43:13 crc kubenswrapper[4576]: I1203 08:43:13.493431 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 08:43:13 crc kubenswrapper[4576]: I1203 08:43:13.496997 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 03 08:43:13 crc kubenswrapper[4576]: I1203 08:43:13.497319 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 03 08:43:13 crc kubenswrapper[4576]: I1203 08:43:13.509316 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 03 08:43:13 crc kubenswrapper[4576]: I1203 08:43:13.575542 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/729366d7-b81d-4984-9279-bfa7d2b9e99d-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"729366d7-b81d-4984-9279-bfa7d2b9e99d\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 08:43:13 crc kubenswrapper[4576]: I1203 08:43:13.575606 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/729366d7-b81d-4984-9279-bfa7d2b9e99d-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"729366d7-b81d-4984-9279-bfa7d2b9e99d\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 08:43:13 crc kubenswrapper[4576]: I1203 08:43:13.677046 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/729366d7-b81d-4984-9279-bfa7d2b9e99d-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"729366d7-b81d-4984-9279-bfa7d2b9e99d\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 08:43:13 crc kubenswrapper[4576]: I1203 08:43:13.677131 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/729366d7-b81d-4984-9279-bfa7d2b9e99d-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"729366d7-b81d-4984-9279-bfa7d2b9e99d\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 08:43:13 crc kubenswrapper[4576]: I1203 08:43:13.677236 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/729366d7-b81d-4984-9279-bfa7d2b9e99d-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"729366d7-b81d-4984-9279-bfa7d2b9e99d\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 08:43:13 crc kubenswrapper[4576]: I1203 08:43:13.712510 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/729366d7-b81d-4984-9279-bfa7d2b9e99d-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"729366d7-b81d-4984-9279-bfa7d2b9e99d\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 08:43:13 crc kubenswrapper[4576]: I1203 08:43:13.817784 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 08:43:15 crc kubenswrapper[4576]: I1203 08:43:15.915826 4576 patch_prober.go:28] interesting pod/downloads-7954f5f757-l27nx container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Dec 03 08:43:15 crc kubenswrapper[4576]: I1203 08:43:15.916736 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-l27nx" podUID="f9d3808c-11ef-421d-83e6-b909679c5490" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Dec 03 08:43:19 crc kubenswrapper[4576]: I1203 08:43:19.068663 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 03 08:43:19 crc kubenswrapper[4576]: I1203 08:43:19.069810 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 03 08:43:19 crc kubenswrapper[4576]: I1203 08:43:19.075852 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 03 08:43:19 crc kubenswrapper[4576]: I1203 08:43:19.208023 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8966b99c-3a63-452f-9150-dd0548ecd89a-kube-api-access\") pod \"installer-9-crc\" (UID: \"8966b99c-3a63-452f-9150-dd0548ecd89a\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 08:43:19 crc kubenswrapper[4576]: I1203 08:43:19.208090 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8966b99c-3a63-452f-9150-dd0548ecd89a-kubelet-dir\") pod \"installer-9-crc\" (UID: \"8966b99c-3a63-452f-9150-dd0548ecd89a\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 08:43:19 crc kubenswrapper[4576]: I1203 08:43:19.208128 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/8966b99c-3a63-452f-9150-dd0548ecd89a-var-lock\") pod \"installer-9-crc\" (UID: \"8966b99c-3a63-452f-9150-dd0548ecd89a\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 08:43:19 crc kubenswrapper[4576]: I1203 08:43:19.309670 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8966b99c-3a63-452f-9150-dd0548ecd89a-kube-api-access\") pod \"installer-9-crc\" (UID: \"8966b99c-3a63-452f-9150-dd0548ecd89a\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 08:43:19 crc kubenswrapper[4576]: I1203 08:43:19.309727 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8966b99c-3a63-452f-9150-dd0548ecd89a-kubelet-dir\") pod \"installer-9-crc\" (UID: \"8966b99c-3a63-452f-9150-dd0548ecd89a\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 08:43:19 crc kubenswrapper[4576]: I1203 08:43:19.309754 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/8966b99c-3a63-452f-9150-dd0548ecd89a-var-lock\") pod \"installer-9-crc\" (UID: \"8966b99c-3a63-452f-9150-dd0548ecd89a\") " 
pod="openshift-kube-apiserver/installer-9-crc" Dec 03 08:43:19 crc kubenswrapper[4576]: I1203 08:43:19.309849 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/8966b99c-3a63-452f-9150-dd0548ecd89a-var-lock\") pod \"installer-9-crc\" (UID: \"8966b99c-3a63-452f-9150-dd0548ecd89a\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 08:43:19 crc kubenswrapper[4576]: I1203 08:43:19.309924 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8966b99c-3a63-452f-9150-dd0548ecd89a-kubelet-dir\") pod \"installer-9-crc\" (UID: \"8966b99c-3a63-452f-9150-dd0548ecd89a\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 08:43:19 crc kubenswrapper[4576]: I1203 08:43:19.333304 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8966b99c-3a63-452f-9150-dd0548ecd89a-kube-api-access\") pod \"installer-9-crc\" (UID: \"8966b99c-3a63-452f-9150-dd0548ecd89a\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 08:43:19 crc kubenswrapper[4576]: I1203 08:43:19.391472 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 03 08:43:19 crc kubenswrapper[4576]: E1203 08:43:19.565078 4576 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 03 08:43:19 crc kubenswrapper[4576]: E1203 08:43:19.565493 4576 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-j2jjs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-kqqpt_openshift-marketplace(430635c4-315a-4af6-83d9-3e4c7407266d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 03 
08:43:19 crc kubenswrapper[4576]: E1203 08:43:19.566711 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-kqqpt" podUID="430635c4-315a-4af6-83d9-3e4c7407266d" Dec 03 08:43:22 crc kubenswrapper[4576]: E1203 08:43:22.132487 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-kqqpt" podUID="430635c4-315a-4af6-83d9-3e4c7407266d" Dec 03 08:43:22 crc kubenswrapper[4576]: E1203 08:43:22.540929 4576 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 03 08:43:22 crc kubenswrapper[4576]: E1203 08:43:22.541380 4576 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7njkb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-hmmm5_openshift-marketplace(558ce81e-ebdc-4b7d-bd04-a3b485857825): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 03 08:43:22 crc kubenswrapper[4576]: E1203 08:43:22.543399 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-hmmm5" podUID="558ce81e-ebdc-4b7d-bd04-a3b485857825" Dec 03 08:43:22 crc kubenswrapper[4576]: E1203 08:43:22.562790 4576 log.go:32] "PullImage from image service failed" err="rpc error: 
code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 03 08:43:22 crc kubenswrapper[4576]: E1203 08:43:22.563299 4576 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5v5th,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-gmpkg_openshift-marketplace(b040fd94-da16-41e4-8fdf-66b7a8e05d87): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 03 08:43:22 crc kubenswrapper[4576]: E1203 08:43:22.564511 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-gmpkg" podUID="b040fd94-da16-41e4-8fdf-66b7a8e05d87" Dec 03 08:43:22 crc kubenswrapper[4576]: E1203 08:43:22.672918 4576 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 03 08:43:22 crc kubenswrapper[4576]: E1203 08:43:22.673347 4576 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9s8vg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-rxb6h_openshift-marketplace(711712fe-5398-42c5-bff6-d8f984c47764): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 03 08:43:22 crc kubenswrapper[4576]: E1203 08:43:22.678387 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-rxb6h" podUID="711712fe-5398-42c5-bff6-d8f984c47764" Dec 03 08:43:22 crc kubenswrapper[4576]: E1203 08:43:22.860952 4576 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 03 08:43:22 crc kubenswrapper[4576]: E1203 08:43:22.861359 4576 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rwj2f,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-lcs7f_openshift-marketplace(b420867b-5c27-4b98-a2ab-0dd31175f5c0): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 03 08:43:22 crc kubenswrapper[4576]: E1203 08:43:22.862910 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-lcs7f" podUID="b420867b-5c27-4b98-a2ab-0dd31175f5c0" Dec 03 08:43:22 crc kubenswrapper[4576]: I1203 08:43:22.864901 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 03 08:43:22 crc kubenswrapper[4576]: W1203 08:43:22.906509 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod729366d7_b81d_4984_9279_bfa7d2b9e99d.slice/crio-0dc39d57734ddf091f1c65cf255fde84d3929f995c127c16cea55e46a6d5cbc3 WatchSource:0}: Error finding container 0dc39d57734ddf091f1c65cf255fde84d3929f995c127c16cea55e46a6d5cbc3: Status 404 returned error can't find the container with id 0dc39d57734ddf091f1c65cf255fde84d3929f995c127c16cea55e46a6d5cbc3 Dec 03 08:43:22 crc kubenswrapper[4576]: E1203 08:43:22.906636 4576 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 03 08:43:22 crc kubenswrapper[4576]: E1203 08:43:22.906764 4576 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fxwd6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-2ngh2_openshift-marketplace(701bd1ac-86c3-48f7-b195-490b5e187f00): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 03 08:43:22 crc kubenswrapper[4576]: E1203 08:43:22.912235 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-2ngh2" podUID="701bd1ac-86c3-48f7-b195-490b5e187f00" Dec 03 08:43:22 crc kubenswrapper[4576]: I1203 08:43:22.928473 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 03 08:43:23 crc kubenswrapper[4576]: I1203 08:43:23.104706 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"8966b99c-3a63-452f-9150-dd0548ecd89a","Type":"ContainerStarted","Data":"734fbc47950b0a2096160c2ea264b83c9234815b0eac488e7b6e859ebb510a80"} Dec 03 08:43:23 crc kubenswrapper[4576]: I1203 08:43:23.112366 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-l27nx" event={"ID":"f9d3808c-11ef-421d-83e6-b909679c5490","Type":"ContainerStarted","Data":"87dc293df06ccae425acec014da376f50557f4b683483a7a4d902c039b8fa78d"} Dec 03 08:43:23 crc kubenswrapper[4576]: I1203 08:43:23.116884 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-l27nx" Dec 03 08:43:23 crc kubenswrapper[4576]: I1203 08:43:23.116978 4576 patch_prober.go:28] interesting pod/downloads-7954f5f757-l27nx container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Dec 03 08:43:23 crc kubenswrapper[4576]: I1203 08:43:23.117005 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-l27nx" podUID="f9d3808c-11ef-421d-83e6-b909679c5490" containerName="download-server" probeResult="failure" 
output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Dec 03 08:43:23 crc kubenswrapper[4576]: I1203 08:43:23.141992 4576 generic.go:334] "Generic (PLEG): container finished" podID="21e70344-efa0-4c1b-a490-a52a1ffcdd13" containerID="9a93f020d88f2444caea124a63d1138df45ee7cc984a60a23f944802cd6d525c" exitCode=0 Dec 03 08:43:23 crc kubenswrapper[4576]: I1203 08:43:23.142070 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q5qf5" event={"ID":"21e70344-efa0-4c1b-a490-a52a1ffcdd13","Type":"ContainerDied","Data":"9a93f020d88f2444caea124a63d1138df45ee7cc984a60a23f944802cd6d525c"} Dec 03 08:43:23 crc kubenswrapper[4576]: E1203 08:43:23.171878 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-lcs7f" podUID="b420867b-5c27-4b98-a2ab-0dd31175f5c0" Dec 03 08:43:23 crc kubenswrapper[4576]: E1203 08:43:23.172163 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-gmpkg" podUID="b040fd94-da16-41e4-8fdf-66b7a8e05d87" Dec 03 08:43:23 crc kubenswrapper[4576]: E1203 08:43:23.172504 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-rxb6h" podUID="711712fe-5398-42c5-bff6-d8f984c47764" Dec 03 08:43:23 crc kubenswrapper[4576]: I1203 08:43:23.165457 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-66ngh" event={"ID":"e48f6c7a-7a1c-41aa-8f03-93df84b64d03","Type":"ContainerStarted","Data":"29e7bee41af38408e9ae1cabd8f8b4d5b596699e07d12c4c679ed906dada2dce"} Dec 03 08:43:23 crc kubenswrapper[4576]: I1203 08:43:23.173734 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"729366d7-b81d-4984-9279-bfa7d2b9e99d","Type":"ContainerStarted","Data":"0dc39d57734ddf091f1c65cf255fde84d3929f995c127c16cea55e46a6d5cbc3"} Dec 03 08:43:23 crc kubenswrapper[4576]: E1203 08:43:23.183726 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-2ngh2" podUID="701bd1ac-86c3-48f7-b195-490b5e187f00" Dec 03 08:43:23 crc kubenswrapper[4576]: E1203 08:43:23.184074 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-hmmm5" podUID="558ce81e-ebdc-4b7d-bd04-a3b485857825" Dec 03 08:43:24 crc kubenswrapper[4576]: I1203 08:43:24.177639 4576 generic.go:334] "Generic (PLEG): container finished" podID="e48f6c7a-7a1c-41aa-8f03-93df84b64d03" containerID="29e7bee41af38408e9ae1cabd8f8b4d5b596699e07d12c4c679ed906dada2dce" exitCode=0 Dec 03 08:43:24 crc 
kubenswrapper[4576]: I1203 08:43:24.178124 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-66ngh" event={"ID":"e48f6c7a-7a1c-41aa-8f03-93df84b64d03","Type":"ContainerDied","Data":"29e7bee41af38408e9ae1cabd8f8b4d5b596699e07d12c4c679ed906dada2dce"} Dec 03 08:43:24 crc kubenswrapper[4576]: I1203 08:43:24.181885 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"729366d7-b81d-4984-9279-bfa7d2b9e99d","Type":"ContainerStarted","Data":"622ead11fd81064c3099d083f2324e7fb7c5c7ff3e66a2a205db0a240b50c436"} Dec 03 08:43:24 crc kubenswrapper[4576]: I1203 08:43:24.185820 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"8966b99c-3a63-452f-9150-dd0548ecd89a","Type":"ContainerStarted","Data":"c272629df5972b840b34cb30ec78934ee0de9bc8b75ad2e3aaaf0f51676d940a"} Dec 03 08:43:24 crc kubenswrapper[4576]: I1203 08:43:24.186429 4576 patch_prober.go:28] interesting pod/downloads-7954f5f757-l27nx container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Dec 03 08:43:24 crc kubenswrapper[4576]: I1203 08:43:24.186487 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-l27nx" podUID="f9d3808c-11ef-421d-83e6-b909679c5490" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Dec 03 08:43:25 crc kubenswrapper[4576]: I1203 08:43:25.199189 4576 generic.go:334] "Generic (PLEG): container finished" podID="729366d7-b81d-4984-9279-bfa7d2b9e99d" containerID="622ead11fd81064c3099d083f2324e7fb7c5c7ff3e66a2a205db0a240b50c436" exitCode=0 Dec 03 08:43:25 crc kubenswrapper[4576]: I1203 08:43:25.199273 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"729366d7-b81d-4984-9279-bfa7d2b9e99d","Type":"ContainerDied","Data":"622ead11fd81064c3099d083f2324e7fb7c5c7ff3e66a2a205db0a240b50c436"} Dec 03 08:43:25 crc kubenswrapper[4576]: I1203 08:43:25.202108 4576 patch_prober.go:28] interesting pod/downloads-7954f5f757-l27nx container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Dec 03 08:43:25 crc kubenswrapper[4576]: I1203 08:43:25.202306 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-l27nx" podUID="f9d3808c-11ef-421d-83e6-b909679c5490" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Dec 03 08:43:25 crc kubenswrapper[4576]: I1203 08:43:25.252465 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=6.252410532 podStartE2EDuration="6.252410532s" podCreationTimestamp="2025-12-03 08:43:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:43:25.237639024 +0000 UTC m=+212.623616028" watchObservedRunningTime="2025-12-03 08:43:25.252410532 +0000 UTC m=+212.638387516" Dec 03 08:43:25 crc kubenswrapper[4576]: I1203 08:43:25.601347 4576 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-76tfl"] Dec 03 08:43:25 crc kubenswrapper[4576]: I1203 08:43:25.915074 4576 patch_prober.go:28] interesting pod/downloads-7954f5f757-l27nx container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Dec 03 08:43:25 crc kubenswrapper[4576]: I1203 08:43:25.915107 4576 patch_prober.go:28] interesting pod/downloads-7954f5f757-l27nx container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Dec 03 08:43:25 crc kubenswrapper[4576]: I1203 08:43:25.915164 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-l27nx" podUID="f9d3808c-11ef-421d-83e6-b909679c5490" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Dec 03 08:43:25 crc kubenswrapper[4576]: I1203 08:43:25.915154 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-l27nx" podUID="f9d3808c-11ef-421d-83e6-b909679c5490" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Dec 03 08:43:26 crc kubenswrapper[4576]: I1203 08:43:26.470590 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 08:43:26 crc kubenswrapper[4576]: I1203 08:43:26.560856 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/729366d7-b81d-4984-9279-bfa7d2b9e99d-kube-api-access\") pod \"729366d7-b81d-4984-9279-bfa7d2b9e99d\" (UID: \"729366d7-b81d-4984-9279-bfa7d2b9e99d\") " Dec 03 08:43:26 crc kubenswrapper[4576]: I1203 08:43:26.560942 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/729366d7-b81d-4984-9279-bfa7d2b9e99d-kubelet-dir\") pod \"729366d7-b81d-4984-9279-bfa7d2b9e99d\" (UID: \"729366d7-b81d-4984-9279-bfa7d2b9e99d\") " Dec 03 08:43:26 crc kubenswrapper[4576]: I1203 08:43:26.561105 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/729366d7-b81d-4984-9279-bfa7d2b9e99d-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "729366d7-b81d-4984-9279-bfa7d2b9e99d" (UID: "729366d7-b81d-4984-9279-bfa7d2b9e99d"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 08:43:26 crc kubenswrapper[4576]: I1203 08:43:26.568237 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/729366d7-b81d-4984-9279-bfa7d2b9e99d-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "729366d7-b81d-4984-9279-bfa7d2b9e99d" (UID: "729366d7-b81d-4984-9279-bfa7d2b9e99d"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:43:26 crc kubenswrapper[4576]: I1203 08:43:26.662519 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/729366d7-b81d-4984-9279-bfa7d2b9e99d-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 08:43:26 crc kubenswrapper[4576]: I1203 08:43:26.662606 4576 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/729366d7-b81d-4984-9279-bfa7d2b9e99d-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 03 08:43:27 crc kubenswrapper[4576]: I1203 08:43:27.211825 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"729366d7-b81d-4984-9279-bfa7d2b9e99d","Type":"ContainerDied","Data":"0dc39d57734ddf091f1c65cf255fde84d3929f995c127c16cea55e46a6d5cbc3"} Dec 03 08:43:27 crc kubenswrapper[4576]: I1203 08:43:27.211875 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0dc39d57734ddf091f1c65cf255fde84d3929f995c127c16cea55e46a6d5cbc3" Dec 03 08:43:27 crc kubenswrapper[4576]: I1203 08:43:27.211908 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 08:43:28 crc kubenswrapper[4576]: I1203 08:43:28.219828 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q5qf5" event={"ID":"21e70344-efa0-4c1b-a490-a52a1ffcdd13","Type":"ContainerStarted","Data":"01bb8a9a9752e8e14258c09bf94c0f1f484f65fe62dff63b464e559ad0069800"} Dec 03 08:43:28 crc kubenswrapper[4576]: I1203 08:43:28.240391 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-q5qf5" podStartSLOduration=3.120726073 podStartE2EDuration="50.240363848s" podCreationTimestamp="2025-12-03 08:42:38 +0000 UTC" firstStartedPulling="2025-12-03 08:42:39.85396362 +0000 UTC m=+167.239940594" lastFinishedPulling="2025-12-03 08:43:26.973601385 +0000 UTC m=+214.359578369" observedRunningTime="2025-12-03 08:43:28.235438511 +0000 UTC m=+215.621415515" watchObservedRunningTime="2025-12-03 08:43:28.240363848 +0000 UTC m=+215.626340832" Dec 03 08:43:28 crc kubenswrapper[4576]: I1203 08:43:28.645349 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-q5qf5" Dec 03 08:43:28 crc kubenswrapper[4576]: I1203 08:43:28.645413 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-q5qf5" Dec 03 08:43:29 crc kubenswrapper[4576]: I1203 08:43:29.746950 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-q5qf5" podUID="21e70344-efa0-4c1b-a490-a52a1ffcdd13" containerName="registry-server" probeResult="failure" output=< Dec 03 08:43:29 crc kubenswrapper[4576]: timeout: failed to connect service ":50051" within 1s Dec 03 08:43:29 crc kubenswrapper[4576]: > Dec 03 08:43:30 crc kubenswrapper[4576]: I1203 08:43:30.233363 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-66ngh" event={"ID":"e48f6c7a-7a1c-41aa-8f03-93df84b64d03","Type":"ContainerStarted","Data":"60757320aa363ad99b993940482fed7b097e40fc19e5a4745cd358fb06207c6d"} Dec 03 08:43:30 crc kubenswrapper[4576]: I1203 08:43:30.256726 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/redhat-operators-66ngh" podStartSLOduration=4.706635423 podStartE2EDuration="51.256700411s" podCreationTimestamp="2025-12-03 08:42:39 +0000 UTC" firstStartedPulling="2025-12-03 08:42:42.194206768 +0000 UTC m=+169.580183752" lastFinishedPulling="2025-12-03 08:43:28.744271756 +0000 UTC m=+216.130248740" observedRunningTime="2025-12-03 08:43:30.254160295 +0000 UTC m=+217.640137309" watchObservedRunningTime="2025-12-03 08:43:30.256700411 +0000 UTC m=+217.642677395" Dec 03 08:43:35 crc kubenswrapper[4576]: I1203 08:43:35.920485 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-l27nx" Dec 03 08:43:38 crc kubenswrapper[4576]: I1203 08:43:38.879676 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-q5qf5" Dec 03 08:43:38 crc kubenswrapper[4576]: I1203 08:43:38.951081 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-q5qf5" Dec 03 08:43:39 crc kubenswrapper[4576]: I1203 08:43:39.609833 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-66ngh" Dec 03 08:43:39 crc kubenswrapper[4576]: I1203 08:43:39.609898 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-66ngh" Dec 03 08:43:39 crc kubenswrapper[4576]: I1203 08:43:39.685439 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:43:39 crc kubenswrapper[4576]: I1203 08:43:39.686403 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:43:39 crc kubenswrapper[4576]: I1203 08:43:39.707553 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-66ngh" Dec 03 08:43:39 crc kubenswrapper[4576]: I1203 08:43:39.707588 4576 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" Dec 03 08:43:39 crc kubenswrapper[4576]: I1203 08:43:39.708178 4576 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"77fcbab4f8441fdeef4ff6f84326c1c5ab799faac0698120b8af9da36a524290"} pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 08:43:39 crc kubenswrapper[4576]: I1203 08:43:39.708257 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" containerID="cri-o://77fcbab4f8441fdeef4ff6f84326c1c5ab799faac0698120b8af9da36a524290" gracePeriod=600 Dec 03 08:43:40 crc kubenswrapper[4576]: I1203 08:43:40.459544 4576 generic.go:334] "Generic (PLEG): container finished" 
podID="b420867b-5c27-4b98-a2ab-0dd31175f5c0" containerID="29058e7ff8417fd1ab3e2bd682d6777a0f1cc9a72717445e00cfafb72b93ef49" exitCode=0 Dec 03 08:43:40 crc kubenswrapper[4576]: I1203 08:43:40.459654 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lcs7f" event={"ID":"b420867b-5c27-4b98-a2ab-0dd31175f5c0","Type":"ContainerDied","Data":"29058e7ff8417fd1ab3e2bd682d6777a0f1cc9a72717445e00cfafb72b93ef49"} Dec 03 08:43:40 crc kubenswrapper[4576]: I1203 08:43:40.466481 4576 generic.go:334] "Generic (PLEG): container finished" podID="b040fd94-da16-41e4-8fdf-66b7a8e05d87" containerID="7d0c6b32c01e007486fd10b40977174d8c807cc402a50c967b5f60814e289130" exitCode=0 Dec 03 08:43:40 crc kubenswrapper[4576]: I1203 08:43:40.466621 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gmpkg" event={"ID":"b040fd94-da16-41e4-8fdf-66b7a8e05d87","Type":"ContainerDied","Data":"7d0c6b32c01e007486fd10b40977174d8c807cc402a50c967b5f60814e289130"} Dec 03 08:43:40 crc kubenswrapper[4576]: I1203 08:43:40.469001 4576 generic.go:334] "Generic (PLEG): container finished" podID="60b1bede-26e9-4b5d-b450-9866da685693" containerID="77fcbab4f8441fdeef4ff6f84326c1c5ab799faac0698120b8af9da36a524290" exitCode=0 Dec 03 08:43:40 crc kubenswrapper[4576]: I1203 08:43:40.469072 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" event={"ID":"60b1bede-26e9-4b5d-b450-9866da685693","Type":"ContainerDied","Data":"77fcbab4f8441fdeef4ff6f84326c1c5ab799faac0698120b8af9da36a524290"} Dec 03 08:43:40 crc kubenswrapper[4576]: I1203 08:43:40.506605 4576 generic.go:334] "Generic (PLEG): container finished" podID="430635c4-315a-4af6-83d9-3e4c7407266d" containerID="32da858c88dead014722089bda64f39517752e8dbd2acda26e51d728a55e4e78" exitCode=0 Dec 03 08:43:40 crc kubenswrapper[4576]: I1203 08:43:40.506746 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kqqpt" event={"ID":"430635c4-315a-4af6-83d9-3e4c7407266d","Type":"ContainerDied","Data":"32da858c88dead014722089bda64f39517752e8dbd2acda26e51d728a55e4e78"} Dec 03 08:43:40 crc kubenswrapper[4576]: I1203 08:43:40.528840 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-q5qf5"] Dec 03 08:43:40 crc kubenswrapper[4576]: I1203 08:43:40.529297 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-q5qf5" podUID="21e70344-efa0-4c1b-a490-a52a1ffcdd13" containerName="registry-server" containerID="cri-o://01bb8a9a9752e8e14258c09bf94c0f1f484f65fe62dff63b464e559ad0069800" gracePeriod=2 Dec 03 08:43:40 crc kubenswrapper[4576]: I1203 08:43:40.560801 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-66ngh" Dec 03 08:43:41 crc kubenswrapper[4576]: I1203 08:43:41.526854 4576 generic.go:334] "Generic (PLEG): container finished" podID="21e70344-efa0-4c1b-a490-a52a1ffcdd13" containerID="01bb8a9a9752e8e14258c09bf94c0f1f484f65fe62dff63b464e559ad0069800" exitCode=0 Dec 03 08:43:41 crc kubenswrapper[4576]: I1203 08:43:41.526954 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q5qf5" event={"ID":"21e70344-efa0-4c1b-a490-a52a1ffcdd13","Type":"ContainerDied","Data":"01bb8a9a9752e8e14258c09bf94c0f1f484f65fe62dff63b464e559ad0069800"} Dec 03 08:43:41 crc kubenswrapper[4576]: I1203 
08:43:41.529603 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-66ngh"] Dec 03 08:43:42 crc kubenswrapper[4576]: I1203 08:43:42.460801 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q5qf5" Dec 03 08:43:42 crc kubenswrapper[4576]: I1203 08:43:42.536129 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" event={"ID":"60b1bede-26e9-4b5d-b450-9866da685693","Type":"ContainerStarted","Data":"321c2215125142daaef8db5c8a6f580e99021024b3fde6bdf944426f9c6256a4"} Dec 03 08:43:42 crc kubenswrapper[4576]: I1203 08:43:42.554984 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2ngh2" event={"ID":"701bd1ac-86c3-48f7-b195-490b5e187f00","Type":"ContainerStarted","Data":"0a0048aae2828e686a5b8c863d3981cd9bd199a1f0ffc0b239515bf6b0ccfc29"} Dec 03 08:43:42 crc kubenswrapper[4576]: I1203 08:43:42.567270 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q5qf5" event={"ID":"21e70344-efa0-4c1b-a490-a52a1ffcdd13","Type":"ContainerDied","Data":"2ff9523a9258ba380e3eeb8abd4fbbf2bc6d9dcb446c4bb16fea3af49711b6a0"} Dec 03 08:43:42 crc kubenswrapper[4576]: I1203 08:43:42.567345 4576 scope.go:117] "RemoveContainer" containerID="01bb8a9a9752e8e14258c09bf94c0f1f484f65fe62dff63b464e559ad0069800" Dec 03 08:43:42 crc kubenswrapper[4576]: I1203 08:43:42.567471 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q5qf5" Dec 03 08:43:42 crc kubenswrapper[4576]: I1203 08:43:42.578239 4576 generic.go:334] "Generic (PLEG): container finished" podID="711712fe-5398-42c5-bff6-d8f984c47764" containerID="c58d9bd62a324d85836a4f3c0807c553b317bf9e351551258d8bde31385860bd" exitCode=0 Dec 03 08:43:42 crc kubenswrapper[4576]: I1203 08:43:42.578321 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rxb6h" event={"ID":"711712fe-5398-42c5-bff6-d8f984c47764","Type":"ContainerDied","Data":"c58d9bd62a324d85836a4f3c0807c553b317bf9e351551258d8bde31385860bd"} Dec 03 08:43:42 crc kubenswrapper[4576]: I1203 08:43:42.591988 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hmmm5" event={"ID":"558ce81e-ebdc-4b7d-bd04-a3b485857825","Type":"ContainerStarted","Data":"8defbe575c79dabe0d3e8bb69e6fddfcd881303238fb8165cbd9b99f8f73cd0d"} Dec 03 08:43:42 crc kubenswrapper[4576]: I1203 08:43:42.592116 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c4zqw\" (UniqueName: \"kubernetes.io/projected/21e70344-efa0-4c1b-a490-a52a1ffcdd13-kube-api-access-c4zqw\") pod \"21e70344-efa0-4c1b-a490-a52a1ffcdd13\" (UID: \"21e70344-efa0-4c1b-a490-a52a1ffcdd13\") " Dec 03 08:43:42 crc kubenswrapper[4576]: I1203 08:43:42.592193 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/21e70344-efa0-4c1b-a490-a52a1ffcdd13-utilities\") pod \"21e70344-efa0-4c1b-a490-a52a1ffcdd13\" (UID: \"21e70344-efa0-4c1b-a490-a52a1ffcdd13\") " Dec 03 08:43:42 crc kubenswrapper[4576]: I1203 08:43:42.592244 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/21e70344-efa0-4c1b-a490-a52a1ffcdd13-catalog-content\") pod 
\"21e70344-efa0-4c1b-a490-a52a1ffcdd13\" (UID: \"21e70344-efa0-4c1b-a490-a52a1ffcdd13\") " Dec 03 08:43:42 crc kubenswrapper[4576]: I1203 08:43:42.593780 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-66ngh" podUID="e48f6c7a-7a1c-41aa-8f03-93df84b64d03" containerName="registry-server" containerID="cri-o://60757320aa363ad99b993940482fed7b097e40fc19e5a4745cd358fb06207c6d" gracePeriod=2 Dec 03 08:43:42 crc kubenswrapper[4576]: I1203 08:43:42.595584 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/21e70344-efa0-4c1b-a490-a52a1ffcdd13-utilities" (OuterVolumeSpecName: "utilities") pod "21e70344-efa0-4c1b-a490-a52a1ffcdd13" (UID: "21e70344-efa0-4c1b-a490-a52a1ffcdd13"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:43:42 crc kubenswrapper[4576]: I1203 08:43:42.598749 4576 scope.go:117] "RemoveContainer" containerID="9a93f020d88f2444caea124a63d1138df45ee7cc984a60a23f944802cd6d525c" Dec 03 08:43:42 crc kubenswrapper[4576]: I1203 08:43:42.600363 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21e70344-efa0-4c1b-a490-a52a1ffcdd13-kube-api-access-c4zqw" (OuterVolumeSpecName: "kube-api-access-c4zqw") pod "21e70344-efa0-4c1b-a490-a52a1ffcdd13" (UID: "21e70344-efa0-4c1b-a490-a52a1ffcdd13"). InnerVolumeSpecName "kube-api-access-c4zqw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:43:42 crc kubenswrapper[4576]: I1203 08:43:42.631646 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/21e70344-efa0-4c1b-a490-a52a1ffcdd13-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "21e70344-efa0-4c1b-a490-a52a1ffcdd13" (UID: "21e70344-efa0-4c1b-a490-a52a1ffcdd13"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:43:42 crc kubenswrapper[4576]: I1203 08:43:42.666811 4576 scope.go:117] "RemoveContainer" containerID="c1a0d473ea305aa09ae8bb7de6e8b2a02947bfd732af990b453fcb25e72102ab" Dec 03 08:43:42 crc kubenswrapper[4576]: I1203 08:43:42.695474 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c4zqw\" (UniqueName: \"kubernetes.io/projected/21e70344-efa0-4c1b-a490-a52a1ffcdd13-kube-api-access-c4zqw\") on node \"crc\" DevicePath \"\"" Dec 03 08:43:42 crc kubenswrapper[4576]: I1203 08:43:42.695508 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/21e70344-efa0-4c1b-a490-a52a1ffcdd13-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 08:43:42 crc kubenswrapper[4576]: I1203 08:43:42.695519 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/21e70344-efa0-4c1b-a490-a52a1ffcdd13-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 08:43:42 crc kubenswrapper[4576]: I1203 08:43:42.913598 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-q5qf5"] Dec 03 08:43:42 crc kubenswrapper[4576]: I1203 08:43:42.914017 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-q5qf5"] Dec 03 08:43:42 crc kubenswrapper[4576]: I1203 08:43:42.989994 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-66ngh" Dec 03 08:43:43 crc kubenswrapper[4576]: I1203 08:43:43.101333 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ksmhr\" (UniqueName: \"kubernetes.io/projected/e48f6c7a-7a1c-41aa-8f03-93df84b64d03-kube-api-access-ksmhr\") pod \"e48f6c7a-7a1c-41aa-8f03-93df84b64d03\" (UID: \"e48f6c7a-7a1c-41aa-8f03-93df84b64d03\") " Dec 03 08:43:43 crc kubenswrapper[4576]: I1203 08:43:43.101393 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e48f6c7a-7a1c-41aa-8f03-93df84b64d03-utilities\") pod \"e48f6c7a-7a1c-41aa-8f03-93df84b64d03\" (UID: \"e48f6c7a-7a1c-41aa-8f03-93df84b64d03\") " Dec 03 08:43:43 crc kubenswrapper[4576]: I1203 08:43:43.101516 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e48f6c7a-7a1c-41aa-8f03-93df84b64d03-catalog-content\") pod \"e48f6c7a-7a1c-41aa-8f03-93df84b64d03\" (UID: \"e48f6c7a-7a1c-41aa-8f03-93df84b64d03\") " Dec 03 08:43:43 crc kubenswrapper[4576]: I1203 08:43:43.103063 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e48f6c7a-7a1c-41aa-8f03-93df84b64d03-utilities" (OuterVolumeSpecName: "utilities") pod "e48f6c7a-7a1c-41aa-8f03-93df84b64d03" (UID: "e48f6c7a-7a1c-41aa-8f03-93df84b64d03"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:43:43 crc kubenswrapper[4576]: I1203 08:43:43.126658 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e48f6c7a-7a1c-41aa-8f03-93df84b64d03-kube-api-access-ksmhr" (OuterVolumeSpecName: "kube-api-access-ksmhr") pod "e48f6c7a-7a1c-41aa-8f03-93df84b64d03" (UID: "e48f6c7a-7a1c-41aa-8f03-93df84b64d03"). InnerVolumeSpecName "kube-api-access-ksmhr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:43:43 crc kubenswrapper[4576]: I1203 08:43:43.202896 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ksmhr\" (UniqueName: \"kubernetes.io/projected/e48f6c7a-7a1c-41aa-8f03-93df84b64d03-kube-api-access-ksmhr\") on node \"crc\" DevicePath \"\"" Dec 03 08:43:43 crc kubenswrapper[4576]: I1203 08:43:43.202939 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e48f6c7a-7a1c-41aa-8f03-93df84b64d03-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 08:43:43 crc kubenswrapper[4576]: I1203 08:43:43.227951 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e48f6c7a-7a1c-41aa-8f03-93df84b64d03-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e48f6c7a-7a1c-41aa-8f03-93df84b64d03" (UID: "e48f6c7a-7a1c-41aa-8f03-93df84b64d03"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:43:43 crc kubenswrapper[4576]: I1203 08:43:43.303901 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e48f6c7a-7a1c-41aa-8f03-93df84b64d03-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 08:43:43 crc kubenswrapper[4576]: I1203 08:43:43.598715 4576 generic.go:334] "Generic (PLEG): container finished" podID="701bd1ac-86c3-48f7-b195-490b5e187f00" containerID="0a0048aae2828e686a5b8c863d3981cd9bd199a1f0ffc0b239515bf6b0ccfc29" exitCode=0 Dec 03 08:43:43 crc kubenswrapper[4576]: I1203 08:43:43.598802 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2ngh2" event={"ID":"701bd1ac-86c3-48f7-b195-490b5e187f00","Type":"ContainerDied","Data":"0a0048aae2828e686a5b8c863d3981cd9bd199a1f0ffc0b239515bf6b0ccfc29"} Dec 03 08:43:43 crc kubenswrapper[4576]: I1203 08:43:43.603592 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rxb6h" event={"ID":"711712fe-5398-42c5-bff6-d8f984c47764","Type":"ContainerStarted","Data":"7b4a4265abf22e3f3d5d5f479cce79a6e4038d9d11a2b95d0b88c634b34b3126"} Dec 03 08:43:43 crc kubenswrapper[4576]: I1203 08:43:43.607564 4576 generic.go:334] "Generic (PLEG): container finished" podID="e48f6c7a-7a1c-41aa-8f03-93df84b64d03" containerID="60757320aa363ad99b993940482fed7b097e40fc19e5a4745cd358fb06207c6d" exitCode=0 Dec 03 08:43:43 crc kubenswrapper[4576]: I1203 08:43:43.607610 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-66ngh" event={"ID":"e48f6c7a-7a1c-41aa-8f03-93df84b64d03","Type":"ContainerDied","Data":"60757320aa363ad99b993940482fed7b097e40fc19e5a4745cd358fb06207c6d"} Dec 03 08:43:43 crc kubenswrapper[4576]: I1203 08:43:43.607657 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-66ngh" event={"ID":"e48f6c7a-7a1c-41aa-8f03-93df84b64d03","Type":"ContainerDied","Data":"66ab2e1e70b6d38b4c8957cbc33f23ceed10f6ff3bbe7870177fc5fda13680ee"} Dec 03 08:43:43 crc kubenswrapper[4576]: I1203 08:43:43.607677 4576 scope.go:117] "RemoveContainer" containerID="60757320aa363ad99b993940482fed7b097e40fc19e5a4745cd358fb06207c6d" Dec 03 08:43:43 crc kubenswrapper[4576]: I1203 08:43:43.607763 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-66ngh" Dec 03 08:43:43 crc kubenswrapper[4576]: I1203 08:43:43.609928 4576 generic.go:334] "Generic (PLEG): container finished" podID="558ce81e-ebdc-4b7d-bd04-a3b485857825" containerID="8defbe575c79dabe0d3e8bb69e6fddfcd881303238fb8165cbd9b99f8f73cd0d" exitCode=0 Dec 03 08:43:43 crc kubenswrapper[4576]: I1203 08:43:43.609966 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hmmm5" event={"ID":"558ce81e-ebdc-4b7d-bd04-a3b485857825","Type":"ContainerDied","Data":"8defbe575c79dabe0d3e8bb69e6fddfcd881303238fb8165cbd9b99f8f73cd0d"} Dec 03 08:43:43 crc kubenswrapper[4576]: I1203 08:43:43.626198 4576 scope.go:117] "RemoveContainer" containerID="29e7bee41af38408e9ae1cabd8f8b4d5b596699e07d12c4c679ed906dada2dce" Dec 03 08:43:43 crc kubenswrapper[4576]: I1203 08:43:43.627283 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gmpkg" event={"ID":"b040fd94-da16-41e4-8fdf-66b7a8e05d87","Type":"ContainerStarted","Data":"9b3bd9c10f35901b401f565cb3d769555bd31516c74c5ee017f3267e8b8be99a"} Dec 03 08:43:43 crc kubenswrapper[4576]: I1203 08:43:43.636057 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kqqpt" event={"ID":"430635c4-315a-4af6-83d9-3e4c7407266d","Type":"ContainerStarted","Data":"cf5f86175dcf96038f8e6f307e1bee0b2ad3ae96c0f4f10eff9cbc4650cc2adc"} Dec 03 08:43:43 crc kubenswrapper[4576]: I1203 08:43:43.640161 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lcs7f" event={"ID":"b420867b-5c27-4b98-a2ab-0dd31175f5c0","Type":"ContainerStarted","Data":"5977173bfde796ab362a15efe5192dfc4331b1a451156464272ffe6bb921352d"} Dec 03 08:43:43 crc kubenswrapper[4576]: I1203 08:43:43.656350 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-gmpkg" podStartSLOduration=4.806942712 podStartE2EDuration="1m8.656307971s" podCreationTimestamp="2025-12-03 08:42:35 +0000 UTC" firstStartedPulling="2025-12-03 08:42:38.483591102 +0000 UTC m=+165.869568086" lastFinishedPulling="2025-12-03 08:43:42.332956371 +0000 UTC m=+229.718933345" observedRunningTime="2025-12-03 08:43:43.654915709 +0000 UTC m=+231.040892693" watchObservedRunningTime="2025-12-03 08:43:43.656307971 +0000 UTC m=+231.042284955" Dec 03 08:43:43 crc kubenswrapper[4576]: I1203 08:43:43.658251 4576 scope.go:117] "RemoveContainer" containerID="f81c88532dd6c1bb445430ac34512721d019d018c2e5953c42a012cb54e3e663" Dec 03 08:43:43 crc kubenswrapper[4576]: I1203 08:43:43.676204 4576 scope.go:117] "RemoveContainer" containerID="60757320aa363ad99b993940482fed7b097e40fc19e5a4745cd358fb06207c6d" Dec 03 08:43:43 crc kubenswrapper[4576]: E1203 08:43:43.676810 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"60757320aa363ad99b993940482fed7b097e40fc19e5a4745cd358fb06207c6d\": container with ID starting with 60757320aa363ad99b993940482fed7b097e40fc19e5a4745cd358fb06207c6d not found: ID does not exist" containerID="60757320aa363ad99b993940482fed7b097e40fc19e5a4745cd358fb06207c6d" Dec 03 08:43:43 crc kubenswrapper[4576]: I1203 08:43:43.676869 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"60757320aa363ad99b993940482fed7b097e40fc19e5a4745cd358fb06207c6d"} err="failed to get container status 
\"60757320aa363ad99b993940482fed7b097e40fc19e5a4745cd358fb06207c6d\": rpc error: code = NotFound desc = could not find container \"60757320aa363ad99b993940482fed7b097e40fc19e5a4745cd358fb06207c6d\": container with ID starting with 60757320aa363ad99b993940482fed7b097e40fc19e5a4745cd358fb06207c6d not found: ID does not exist" Dec 03 08:43:43 crc kubenswrapper[4576]: I1203 08:43:43.676901 4576 scope.go:117] "RemoveContainer" containerID="29e7bee41af38408e9ae1cabd8f8b4d5b596699e07d12c4c679ed906dada2dce" Dec 03 08:43:43 crc kubenswrapper[4576]: E1203 08:43:43.677265 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"29e7bee41af38408e9ae1cabd8f8b4d5b596699e07d12c4c679ed906dada2dce\": container with ID starting with 29e7bee41af38408e9ae1cabd8f8b4d5b596699e07d12c4c679ed906dada2dce not found: ID does not exist" containerID="29e7bee41af38408e9ae1cabd8f8b4d5b596699e07d12c4c679ed906dada2dce" Dec 03 08:43:43 crc kubenswrapper[4576]: I1203 08:43:43.677287 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"29e7bee41af38408e9ae1cabd8f8b4d5b596699e07d12c4c679ed906dada2dce"} err="failed to get container status \"29e7bee41af38408e9ae1cabd8f8b4d5b596699e07d12c4c679ed906dada2dce\": rpc error: code = NotFound desc = could not find container \"29e7bee41af38408e9ae1cabd8f8b4d5b596699e07d12c4c679ed906dada2dce\": container with ID starting with 29e7bee41af38408e9ae1cabd8f8b4d5b596699e07d12c4c679ed906dada2dce not found: ID does not exist" Dec 03 08:43:43 crc kubenswrapper[4576]: I1203 08:43:43.677304 4576 scope.go:117] "RemoveContainer" containerID="f81c88532dd6c1bb445430ac34512721d019d018c2e5953c42a012cb54e3e663" Dec 03 08:43:43 crc kubenswrapper[4576]: E1203 08:43:43.684046 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f81c88532dd6c1bb445430ac34512721d019d018c2e5953c42a012cb54e3e663\": container with ID starting with f81c88532dd6c1bb445430ac34512721d019d018c2e5953c42a012cb54e3e663 not found: ID does not exist" containerID="f81c88532dd6c1bb445430ac34512721d019d018c2e5953c42a012cb54e3e663" Dec 03 08:43:43 crc kubenswrapper[4576]: I1203 08:43:43.684089 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f81c88532dd6c1bb445430ac34512721d019d018c2e5953c42a012cb54e3e663"} err="failed to get container status \"f81c88532dd6c1bb445430ac34512721d019d018c2e5953c42a012cb54e3e663\": rpc error: code = NotFound desc = could not find container \"f81c88532dd6c1bb445430ac34512721d019d018c2e5953c42a012cb54e3e663\": container with ID starting with f81c88532dd6c1bb445430ac34512721d019d018c2e5953c42a012cb54e3e663 not found: ID does not exist" Dec 03 08:43:43 crc kubenswrapper[4576]: I1203 08:43:43.686002 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-rxb6h" podStartSLOduration=4.09511018 podStartE2EDuration="1m8.68598953s" podCreationTimestamp="2025-12-03 08:42:35 +0000 UTC" firstStartedPulling="2025-12-03 08:42:38.393729788 +0000 UTC m=+165.779706772" lastFinishedPulling="2025-12-03 08:43:42.984609138 +0000 UTC m=+230.370586122" observedRunningTime="2025-12-03 08:43:43.685826886 +0000 UTC m=+231.071803880" watchObservedRunningTime="2025-12-03 08:43:43.68598953 +0000 UTC m=+231.071966514" Dec 03 08:43:43 crc kubenswrapper[4576]: I1203 08:43:43.687425 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes 
dir" podUID="21e70344-efa0-4c1b-a490-a52a1ffcdd13" path="/var/lib/kubelet/pods/21e70344-efa0-4c1b-a490-a52a1ffcdd13/volumes" Dec 03 08:43:43 crc kubenswrapper[4576]: I1203 08:43:43.703546 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-66ngh"] Dec 03 08:43:43 crc kubenswrapper[4576]: I1203 08:43:43.709456 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-66ngh"] Dec 03 08:43:43 crc kubenswrapper[4576]: I1203 08:43:43.751775 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-lcs7f" podStartSLOduration=4.340590357 podStartE2EDuration="1m6.7517535s" podCreationTimestamp="2025-12-03 08:42:37 +0000 UTC" firstStartedPulling="2025-12-03 08:42:39.987500416 +0000 UTC m=+167.373477400" lastFinishedPulling="2025-12-03 08:43:42.398663549 +0000 UTC m=+229.784640543" observedRunningTime="2025-12-03 08:43:43.749629008 +0000 UTC m=+231.135605992" watchObservedRunningTime="2025-12-03 08:43:43.7517535 +0000 UTC m=+231.137730484" Dec 03 08:43:43 crc kubenswrapper[4576]: I1203 08:43:43.773935 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-kqqpt" podStartSLOduration=3.722502529 podStartE2EDuration="1m7.773913267s" podCreationTimestamp="2025-12-03 08:42:36 +0000 UTC" firstStartedPulling="2025-12-03 08:42:38.38669521 +0000 UTC m=+165.772672194" lastFinishedPulling="2025-12-03 08:43:42.438105948 +0000 UTC m=+229.824082932" observedRunningTime="2025-12-03 08:43:43.77131248 +0000 UTC m=+231.157289464" watchObservedRunningTime="2025-12-03 08:43:43.773913267 +0000 UTC m=+231.159890241" Dec 03 08:43:44 crc kubenswrapper[4576]: I1203 08:43:44.653521 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hmmm5" event={"ID":"558ce81e-ebdc-4b7d-bd04-a3b485857825","Type":"ContainerStarted","Data":"cd1bf63624ead73f7e0c42788612f4c37b394db8dba9994b12523f1f59ed9c63"} Dec 03 08:43:44 crc kubenswrapper[4576]: I1203 08:43:44.656195 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2ngh2" event={"ID":"701bd1ac-86c3-48f7-b195-490b5e187f00","Type":"ContainerStarted","Data":"dc835fbf2a0ad9458c6e49d394afc9dd83042c17eee9a776e680d302f1b4eb78"} Dec 03 08:43:44 crc kubenswrapper[4576]: I1203 08:43:44.730086 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hmmm5" podStartSLOduration=3.162742823 podStartE2EDuration="1m8.730062291s" podCreationTimestamp="2025-12-03 08:42:36 +0000 UTC" firstStartedPulling="2025-12-03 08:42:38.426749655 +0000 UTC m=+165.812726639" lastFinishedPulling="2025-12-03 08:43:43.994069123 +0000 UTC m=+231.380046107" observedRunningTime="2025-12-03 08:43:44.699019801 +0000 UTC m=+232.084996815" watchObservedRunningTime="2025-12-03 08:43:44.730062291 +0000 UTC m=+232.116039285" Dec 03 08:43:45 crc kubenswrapper[4576]: I1203 08:43:45.958737 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e48f6c7a-7a1c-41aa-8f03-93df84b64d03" path="/var/lib/kubelet/pods/e48f6c7a-7a1c-41aa-8f03-93df84b64d03/volumes" Dec 03 08:43:45 crc kubenswrapper[4576]: I1203 08:43:45.961361 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-gmpkg" Dec 03 08:43:45 crc kubenswrapper[4576]: I1203 08:43:45.961410 4576 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-gmpkg" Dec 03 08:43:45 crc kubenswrapper[4576]: I1203 08:43:45.980478 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-gmpkg" Dec 03 08:43:46 crc kubenswrapper[4576]: I1203 08:43:46.019438 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-2ngh2" podStartSLOduration=6.256263315 podStartE2EDuration="1m8.019414553s" podCreationTimestamp="2025-12-03 08:42:38 +0000 UTC" firstStartedPulling="2025-12-03 08:42:42.208972115 +0000 UTC m=+169.594949099" lastFinishedPulling="2025-12-03 08:43:43.972123353 +0000 UTC m=+231.358100337" observedRunningTime="2025-12-03 08:43:44.736074429 +0000 UTC m=+232.122051413" watchObservedRunningTime="2025-12-03 08:43:46.019414553 +0000 UTC m=+233.405391557" Dec 03 08:43:46 crc kubenswrapper[4576]: I1203 08:43:46.117594 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-rxb6h" Dec 03 08:43:46 crc kubenswrapper[4576]: I1203 08:43:46.117645 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-rxb6h" Dec 03 08:43:46 crc kubenswrapper[4576]: I1203 08:43:46.155246 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-rxb6h" Dec 03 08:43:46 crc kubenswrapper[4576]: I1203 08:43:46.659470 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hmmm5" Dec 03 08:43:46 crc kubenswrapper[4576]: I1203 08:43:46.659592 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hmmm5" Dec 03 08:43:46 crc kubenswrapper[4576]: I1203 08:43:46.714812 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hmmm5" Dec 03 08:43:46 crc kubenswrapper[4576]: I1203 08:43:46.928351 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-kqqpt" Dec 03 08:43:46 crc kubenswrapper[4576]: I1203 08:43:46.928405 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-kqqpt" Dec 03 08:43:46 crc kubenswrapper[4576]: I1203 08:43:46.972279 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-kqqpt" Dec 03 08:43:47 crc kubenswrapper[4576]: I1203 08:43:47.718960 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-kqqpt" Dec 03 08:43:48 crc kubenswrapper[4576]: I1203 08:43:48.282577 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-lcs7f" Dec 03 08:43:48 crc kubenswrapper[4576]: I1203 08:43:48.282645 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-lcs7f" Dec 03 08:43:48 crc kubenswrapper[4576]: I1203 08:43:48.320291 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-lcs7f" Dec 03 08:43:48 crc kubenswrapper[4576]: I1203 08:43:48.732129 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/redhat-marketplace-lcs7f" Dec 03 08:43:49 crc kubenswrapper[4576]: I1203 08:43:49.130676 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-2ngh2" Dec 03 08:43:49 crc kubenswrapper[4576]: I1203 08:43:49.130750 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-2ngh2" Dec 03 08:43:50 crc kubenswrapper[4576]: I1203 08:43:50.164292 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-2ngh2" podUID="701bd1ac-86c3-48f7-b195-490b5e187f00" containerName="registry-server" probeResult="failure" output=< Dec 03 08:43:50 crc kubenswrapper[4576]: timeout: failed to connect service ":50051" within 1s Dec 03 08:43:50 crc kubenswrapper[4576]: > Dec 03 08:43:50 crc kubenswrapper[4576]: I1203 08:43:50.653663 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" podUID="a2991975-3b10-4f7d-aa48-750e3c402870" containerName="oauth-openshift" containerID="cri-o://e482b9c26690d11c937f503bbc0b4c8f87e75c2ad3fde26df140cb6953290c50" gracePeriod=15 Dec 03 08:43:50 crc kubenswrapper[4576]: I1203 08:43:50.933118 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kqqpt"] Dec 03 08:43:50 crc kubenswrapper[4576]: I1203 08:43:50.934197 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-kqqpt" podUID="430635c4-315a-4af6-83d9-3e4c7407266d" containerName="registry-server" containerID="cri-o://cf5f86175dcf96038f8e6f307e1bee0b2ad3ae96c0f4f10eff9cbc4650cc2adc" gracePeriod=2 Dec 03 08:43:53 crc kubenswrapper[4576]: I1203 08:43:53.716500 4576 generic.go:334] "Generic (PLEG): container finished" podID="a2991975-3b10-4f7d-aa48-750e3c402870" containerID="e482b9c26690d11c937f503bbc0b4c8f87e75c2ad3fde26df140cb6953290c50" exitCode=0 Dec 03 08:43:53 crc kubenswrapper[4576]: I1203 08:43:53.716573 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" event={"ID":"a2991975-3b10-4f7d-aa48-750e3c402870","Type":"ContainerDied","Data":"e482b9c26690d11c937f503bbc0b4c8f87e75c2ad3fde26df140cb6953290c50"} Dec 03 08:43:53 crc kubenswrapper[4576]: I1203 08:43:53.719052 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-kqqpt_430635c4-315a-4af6-83d9-3e4c7407266d/registry-server/0.log" Dec 03 08:43:53 crc kubenswrapper[4576]: I1203 08:43:53.720229 4576 generic.go:334] "Generic (PLEG): container finished" podID="430635c4-315a-4af6-83d9-3e4c7407266d" containerID="cf5f86175dcf96038f8e6f307e1bee0b2ad3ae96c0f4f10eff9cbc4650cc2adc" exitCode=137 Dec 03 08:43:53 crc kubenswrapper[4576]: I1203 08:43:53.720272 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kqqpt" event={"ID":"430635c4-315a-4af6-83d9-3e4c7407266d","Type":"ContainerDied","Data":"cf5f86175dcf96038f8e6f307e1bee0b2ad3ae96c0f4f10eff9cbc4650cc2adc"} Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.595624 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.727589 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" event={"ID":"a2991975-3b10-4f7d-aa48-750e3c402870","Type":"ContainerDied","Data":"cdf8e93fe377693d8b35ed6fd1cd1fde17be6b97dc22655cafe243cf04a8fa81"} Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.727649 4576 scope.go:117] "RemoveContainer" containerID="e482b9c26690d11c937f503bbc0b4c8f87e75c2ad3fde26df140cb6953290c50" Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.727648 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-76tfl" Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.776134 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p28dj\" (UniqueName: \"kubernetes.io/projected/a2991975-3b10-4f7d-aa48-750e3c402870-kube-api-access-p28dj\") pod \"a2991975-3b10-4f7d-aa48-750e3c402870\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.776477 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-system-trusted-ca-bundle\") pod \"a2991975-3b10-4f7d-aa48-750e3c402870\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.776510 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-system-service-ca\") pod \"a2991975-3b10-4f7d-aa48-750e3c402870\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.776596 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-user-template-provider-selection\") pod \"a2991975-3b10-4f7d-aa48-750e3c402870\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.776628 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-system-ocp-branding-template\") pod \"a2991975-3b10-4f7d-aa48-750e3c402870\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.776668 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-system-session\") pod \"a2991975-3b10-4f7d-aa48-750e3c402870\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.776689 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-system-serving-cert\") pod \"a2991975-3b10-4f7d-aa48-750e3c402870\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " Dec 03 08:43:54 crc 
kubenswrapper[4576]: I1203 08:43:54.776826 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-user-template-login\") pod \"a2991975-3b10-4f7d-aa48-750e3c402870\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.776851 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-system-router-certs\") pod \"a2991975-3b10-4f7d-aa48-750e3c402870\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.776882 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-system-cliconfig\") pod \"a2991975-3b10-4f7d-aa48-750e3c402870\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.776907 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a2991975-3b10-4f7d-aa48-750e3c402870-audit-policies\") pod \"a2991975-3b10-4f7d-aa48-750e3c402870\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.776927 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-user-template-error\") pod \"a2991975-3b10-4f7d-aa48-750e3c402870\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.776946 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a2991975-3b10-4f7d-aa48-750e3c402870-audit-dir\") pod \"a2991975-3b10-4f7d-aa48-750e3c402870\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.776974 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-user-idp-0-file-data\") pod \"a2991975-3b10-4f7d-aa48-750e3c402870\" (UID: \"a2991975-3b10-4f7d-aa48-750e3c402870\") " Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.778089 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a2991975-3b10-4f7d-aa48-750e3c402870-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "a2991975-3b10-4f7d-aa48-750e3c402870" (UID: "a2991975-3b10-4f7d-aa48-750e3c402870"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.778096 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "a2991975-3b10-4f7d-aa48-750e3c402870" (UID: "a2991975-3b10-4f7d-aa48-750e3c402870"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.780101 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "a2991975-3b10-4f7d-aa48-750e3c402870" (UID: "a2991975-3b10-4f7d-aa48-750e3c402870"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.780489 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a2991975-3b10-4f7d-aa48-750e3c402870-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "a2991975-3b10-4f7d-aa48-750e3c402870" (UID: "a2991975-3b10-4f7d-aa48-750e3c402870"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.780924 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "a2991975-3b10-4f7d-aa48-750e3c402870" (UID: "a2991975-3b10-4f7d-aa48-750e3c402870"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.782126 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "a2991975-3b10-4f7d-aa48-750e3c402870" (UID: "a2991975-3b10-4f7d-aa48-750e3c402870"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.782166 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2991975-3b10-4f7d-aa48-750e3c402870-kube-api-access-p28dj" (OuterVolumeSpecName: "kube-api-access-p28dj") pod "a2991975-3b10-4f7d-aa48-750e3c402870" (UID: "a2991975-3b10-4f7d-aa48-750e3c402870"). InnerVolumeSpecName "kube-api-access-p28dj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.782237 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "a2991975-3b10-4f7d-aa48-750e3c402870" (UID: "a2991975-3b10-4f7d-aa48-750e3c402870"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.782501 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "a2991975-3b10-4f7d-aa48-750e3c402870" (UID: "a2991975-3b10-4f7d-aa48-750e3c402870"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.782718 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "a2991975-3b10-4f7d-aa48-750e3c402870" (UID: "a2991975-3b10-4f7d-aa48-750e3c402870"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.782871 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "a2991975-3b10-4f7d-aa48-750e3c402870" (UID: "a2991975-3b10-4f7d-aa48-750e3c402870"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.784464 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "a2991975-3b10-4f7d-aa48-750e3c402870" (UID: "a2991975-3b10-4f7d-aa48-750e3c402870"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.785115 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "a2991975-3b10-4f7d-aa48-750e3c402870" (UID: "a2991975-3b10-4f7d-aa48-750e3c402870"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.787542 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "a2991975-3b10-4f7d-aa48-750e3c402870" (UID: "a2991975-3b10-4f7d-aa48-750e3c402870"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.807501 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-kqqpt_430635c4-315a-4af6-83d9-3e4c7407266d/registry-server/0.log" Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.808437 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-kqqpt" Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.878183 4576 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.878236 4576 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.878248 4576 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.878258 4576 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a2991975-3b10-4f7d-aa48-750e3c402870-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.878268 4576 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.878279 4576 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a2991975-3b10-4f7d-aa48-750e3c402870-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.878290 4576 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.878300 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p28dj\" (UniqueName: \"kubernetes.io/projected/a2991975-3b10-4f7d-aa48-750e3c402870-kube-api-access-p28dj\") on node \"crc\" DevicePath \"\"" Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.878309 4576 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.878317 4576 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.878326 4576 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.878336 4576 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: 
\"kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.878345 4576 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.878354 4576 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a2991975-3b10-4f7d-aa48-750e3c402870-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.979087 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/430635c4-315a-4af6-83d9-3e4c7407266d-catalog-content\") pod \"430635c4-315a-4af6-83d9-3e4c7407266d\" (UID: \"430635c4-315a-4af6-83d9-3e4c7407266d\") " Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.979207 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j2jjs\" (UniqueName: \"kubernetes.io/projected/430635c4-315a-4af6-83d9-3e4c7407266d-kube-api-access-j2jjs\") pod \"430635c4-315a-4af6-83d9-3e4c7407266d\" (UID: \"430635c4-315a-4af6-83d9-3e4c7407266d\") " Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.979307 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/430635c4-315a-4af6-83d9-3e4c7407266d-utilities\") pod \"430635c4-315a-4af6-83d9-3e4c7407266d\" (UID: \"430635c4-315a-4af6-83d9-3e4c7407266d\") " Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.980807 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/430635c4-315a-4af6-83d9-3e4c7407266d-utilities" (OuterVolumeSpecName: "utilities") pod "430635c4-315a-4af6-83d9-3e4c7407266d" (UID: "430635c4-315a-4af6-83d9-3e4c7407266d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:43:54 crc kubenswrapper[4576]: I1203 08:43:54.982425 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/430635c4-315a-4af6-83d9-3e4c7407266d-kube-api-access-j2jjs" (OuterVolumeSpecName: "kube-api-access-j2jjs") pod "430635c4-315a-4af6-83d9-3e4c7407266d" (UID: "430635c4-315a-4af6-83d9-3e4c7407266d"). InnerVolumeSpecName "kube-api-access-j2jjs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.063329 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-76tfl"] Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.069338 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-76tfl"] Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.075120 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/430635c4-315a-4af6-83d9-3e4c7407266d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "430635c4-315a-4af6-83d9-3e4c7407266d" (UID: "430635c4-315a-4af6-83d9-3e4c7407266d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.080412 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/430635c4-315a-4af6-83d9-3e4c7407266d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.080509 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j2jjs\" (UniqueName: \"kubernetes.io/projected/430635c4-315a-4af6-83d9-3e4c7407266d-kube-api-access-j2jjs\") on node \"crc\" DevicePath \"\"" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.080593 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/430635c4-315a-4af6-83d9-3e4c7407266d-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.169587 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-554f5c8786-6wt4r"] Dec 03 08:43:55 crc kubenswrapper[4576]: E1203 08:43:55.169905 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="729366d7-b81d-4984-9279-bfa7d2b9e99d" containerName="pruner" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.169927 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="729366d7-b81d-4984-9279-bfa7d2b9e99d" containerName="pruner" Dec 03 08:43:55 crc kubenswrapper[4576]: E1203 08:43:55.169948 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e48f6c7a-7a1c-41aa-8f03-93df84b64d03" containerName="registry-server" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.169957 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="e48f6c7a-7a1c-41aa-8f03-93df84b64d03" containerName="registry-server" Dec 03 08:43:55 crc kubenswrapper[4576]: E1203 08:43:55.169972 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="430635c4-315a-4af6-83d9-3e4c7407266d" containerName="extract-utilities" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.169981 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="430635c4-315a-4af6-83d9-3e4c7407266d" containerName="extract-utilities" Dec 03 08:43:55 crc kubenswrapper[4576]: E1203 08:43:55.169991 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21e70344-efa0-4c1b-a490-a52a1ffcdd13" containerName="registry-server" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.170000 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="21e70344-efa0-4c1b-a490-a52a1ffcdd13" containerName="registry-server" Dec 03 08:43:55 crc kubenswrapper[4576]: E1203 08:43:55.170013 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21e70344-efa0-4c1b-a490-a52a1ffcdd13" containerName="extract-content" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.170020 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="21e70344-efa0-4c1b-a490-a52a1ffcdd13" containerName="extract-content" Dec 03 08:43:55 crc kubenswrapper[4576]: E1203 08:43:55.170032 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e48f6c7a-7a1c-41aa-8f03-93df84b64d03" containerName="extract-utilities" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.170040 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="e48f6c7a-7a1c-41aa-8f03-93df84b64d03" containerName="extract-utilities" Dec 03 08:43:55 crc kubenswrapper[4576]: E1203 08:43:55.170052 4576 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="430635c4-315a-4af6-83d9-3e4c7407266d" containerName="extract-content" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.170060 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="430635c4-315a-4af6-83d9-3e4c7407266d" containerName="extract-content" Dec 03 08:43:55 crc kubenswrapper[4576]: E1203 08:43:55.170071 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="430635c4-315a-4af6-83d9-3e4c7407266d" containerName="registry-server" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.170111 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="430635c4-315a-4af6-83d9-3e4c7407266d" containerName="registry-server" Dec 03 08:43:55 crc kubenswrapper[4576]: E1203 08:43:55.170129 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e48f6c7a-7a1c-41aa-8f03-93df84b64d03" containerName="extract-content" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.170137 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="e48f6c7a-7a1c-41aa-8f03-93df84b64d03" containerName="extract-content" Dec 03 08:43:55 crc kubenswrapper[4576]: E1203 08:43:55.170149 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21e70344-efa0-4c1b-a490-a52a1ffcdd13" containerName="extract-utilities" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.170156 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="21e70344-efa0-4c1b-a490-a52a1ffcdd13" containerName="extract-utilities" Dec 03 08:43:55 crc kubenswrapper[4576]: E1203 08:43:55.170168 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2991975-3b10-4f7d-aa48-750e3c402870" containerName="oauth-openshift" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.170177 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2991975-3b10-4f7d-aa48-750e3c402870" containerName="oauth-openshift" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.170298 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="21e70344-efa0-4c1b-a490-a52a1ffcdd13" containerName="registry-server" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.170352 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="430635c4-315a-4af6-83d9-3e4c7407266d" containerName="registry-server" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.170364 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2991975-3b10-4f7d-aa48-750e3c402870" containerName="oauth-openshift" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.170378 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="729366d7-b81d-4984-9279-bfa7d2b9e99d" containerName="pruner" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.170390 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="e48f6c7a-7a1c-41aa-8f03-93df84b64d03" containerName="registry-server" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.171007 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.175433 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.175789 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.175821 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.176198 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.176386 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.176590 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.176595 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.177187 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.177892 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.184934 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.185317 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.188870 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-554f5c8786-6wt4r"] Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.189429 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.190339 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.201941 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.204231 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.288442 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f2484eac-79e3-4477-a975-ed7dfdb72abd-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-554f5c8786-6wt4r\" (UID: \"f2484eac-79e3-4477-a975-ed7dfdb72abd\") " 
pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.288558 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f2484eac-79e3-4477-a975-ed7dfdb72abd-v4-0-config-system-service-ca\") pod \"oauth-openshift-554f5c8786-6wt4r\" (UID: \"f2484eac-79e3-4477-a975-ed7dfdb72abd\") " pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.288648 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f2484eac-79e3-4477-a975-ed7dfdb72abd-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-554f5c8786-6wt4r\" (UID: \"f2484eac-79e3-4477-a975-ed7dfdb72abd\") " pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.288688 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f2484eac-79e3-4477-a975-ed7dfdb72abd-v4-0-config-system-router-certs\") pod \"oauth-openshift-554f5c8786-6wt4r\" (UID: \"f2484eac-79e3-4477-a975-ed7dfdb72abd\") " pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.288733 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f2484eac-79e3-4477-a975-ed7dfdb72abd-audit-policies\") pod \"oauth-openshift-554f5c8786-6wt4r\" (UID: \"f2484eac-79e3-4477-a975-ed7dfdb72abd\") " pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.289117 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f2484eac-79e3-4477-a975-ed7dfdb72abd-v4-0-config-system-serving-cert\") pod \"oauth-openshift-554f5c8786-6wt4r\" (UID: \"f2484eac-79e3-4477-a975-ed7dfdb72abd\") " pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.289177 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqw5z\" (UniqueName: \"kubernetes.io/projected/f2484eac-79e3-4477-a975-ed7dfdb72abd-kube-api-access-fqw5z\") pod \"oauth-openshift-554f5c8786-6wt4r\" (UID: \"f2484eac-79e3-4477-a975-ed7dfdb72abd\") " pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.289236 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f2484eac-79e3-4477-a975-ed7dfdb72abd-audit-dir\") pod \"oauth-openshift-554f5c8786-6wt4r\" (UID: \"f2484eac-79e3-4477-a975-ed7dfdb72abd\") " pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.289300 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f2484eac-79e3-4477-a975-ed7dfdb72abd-v4-0-config-system-session\") pod 
\"oauth-openshift-554f5c8786-6wt4r\" (UID: \"f2484eac-79e3-4477-a975-ed7dfdb72abd\") " pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.289402 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f2484eac-79e3-4477-a975-ed7dfdb72abd-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-554f5c8786-6wt4r\" (UID: \"f2484eac-79e3-4477-a975-ed7dfdb72abd\") " pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.289453 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f2484eac-79e3-4477-a975-ed7dfdb72abd-v4-0-config-user-template-error\") pod \"oauth-openshift-554f5c8786-6wt4r\" (UID: \"f2484eac-79e3-4477-a975-ed7dfdb72abd\") " pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.289490 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f2484eac-79e3-4477-a975-ed7dfdb72abd-v4-0-config-user-template-login\") pod \"oauth-openshift-554f5c8786-6wt4r\" (UID: \"f2484eac-79e3-4477-a975-ed7dfdb72abd\") " pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.289656 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f2484eac-79e3-4477-a975-ed7dfdb72abd-v4-0-config-system-cliconfig\") pod \"oauth-openshift-554f5c8786-6wt4r\" (UID: \"f2484eac-79e3-4477-a975-ed7dfdb72abd\") " pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.289715 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f2484eac-79e3-4477-a975-ed7dfdb72abd-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-554f5c8786-6wt4r\" (UID: \"f2484eac-79e3-4477-a975-ed7dfdb72abd\") " pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.390513 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f2484eac-79e3-4477-a975-ed7dfdb72abd-audit-policies\") pod \"oauth-openshift-554f5c8786-6wt4r\" (UID: \"f2484eac-79e3-4477-a975-ed7dfdb72abd\") " pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.390721 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f2484eac-79e3-4477-a975-ed7dfdb72abd-v4-0-config-system-serving-cert\") pod \"oauth-openshift-554f5c8786-6wt4r\" (UID: \"f2484eac-79e3-4477-a975-ed7dfdb72abd\") " pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.390777 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqw5z\" (UniqueName: 
\"kubernetes.io/projected/f2484eac-79e3-4477-a975-ed7dfdb72abd-kube-api-access-fqw5z\") pod \"oauth-openshift-554f5c8786-6wt4r\" (UID: \"f2484eac-79e3-4477-a975-ed7dfdb72abd\") " pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.390868 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f2484eac-79e3-4477-a975-ed7dfdb72abd-audit-dir\") pod \"oauth-openshift-554f5c8786-6wt4r\" (UID: \"f2484eac-79e3-4477-a975-ed7dfdb72abd\") " pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.390926 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f2484eac-79e3-4477-a975-ed7dfdb72abd-v4-0-config-system-session\") pod \"oauth-openshift-554f5c8786-6wt4r\" (UID: \"f2484eac-79e3-4477-a975-ed7dfdb72abd\") " pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.390980 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f2484eac-79e3-4477-a975-ed7dfdb72abd-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-554f5c8786-6wt4r\" (UID: \"f2484eac-79e3-4477-a975-ed7dfdb72abd\") " pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.391042 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f2484eac-79e3-4477-a975-ed7dfdb72abd-audit-dir\") pod \"oauth-openshift-554f5c8786-6wt4r\" (UID: \"f2484eac-79e3-4477-a975-ed7dfdb72abd\") " pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.391084 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f2484eac-79e3-4477-a975-ed7dfdb72abd-v4-0-config-user-template-error\") pod \"oauth-openshift-554f5c8786-6wt4r\" (UID: \"f2484eac-79e3-4477-a975-ed7dfdb72abd\") " pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.391148 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f2484eac-79e3-4477-a975-ed7dfdb72abd-v4-0-config-user-template-login\") pod \"oauth-openshift-554f5c8786-6wt4r\" (UID: \"f2484eac-79e3-4477-a975-ed7dfdb72abd\") " pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.391266 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f2484eac-79e3-4477-a975-ed7dfdb72abd-v4-0-config-system-cliconfig\") pod \"oauth-openshift-554f5c8786-6wt4r\" (UID: \"f2484eac-79e3-4477-a975-ed7dfdb72abd\") " pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.391319 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: 
\"kubernetes.io/secret/f2484eac-79e3-4477-a975-ed7dfdb72abd-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-554f5c8786-6wt4r\" (UID: \"f2484eac-79e3-4477-a975-ed7dfdb72abd\") " pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.391409 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f2484eac-79e3-4477-a975-ed7dfdb72abd-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-554f5c8786-6wt4r\" (UID: \"f2484eac-79e3-4477-a975-ed7dfdb72abd\") " pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.391462 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f2484eac-79e3-4477-a975-ed7dfdb72abd-v4-0-config-system-service-ca\") pod \"oauth-openshift-554f5c8786-6wt4r\" (UID: \"f2484eac-79e3-4477-a975-ed7dfdb72abd\") " pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.391583 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f2484eac-79e3-4477-a975-ed7dfdb72abd-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-554f5c8786-6wt4r\" (UID: \"f2484eac-79e3-4477-a975-ed7dfdb72abd\") " pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.391654 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f2484eac-79e3-4477-a975-ed7dfdb72abd-v4-0-config-system-router-certs\") pod \"oauth-openshift-554f5c8786-6wt4r\" (UID: \"f2484eac-79e3-4477-a975-ed7dfdb72abd\") " pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.393006 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f2484eac-79e3-4477-a975-ed7dfdb72abd-v4-0-config-system-cliconfig\") pod \"oauth-openshift-554f5c8786-6wt4r\" (UID: \"f2484eac-79e3-4477-a975-ed7dfdb72abd\") " pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.393358 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f2484eac-79e3-4477-a975-ed7dfdb72abd-audit-policies\") pod \"oauth-openshift-554f5c8786-6wt4r\" (UID: \"f2484eac-79e3-4477-a975-ed7dfdb72abd\") " pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.394897 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f2484eac-79e3-4477-a975-ed7dfdb72abd-v4-0-config-system-service-ca\") pod \"oauth-openshift-554f5c8786-6wt4r\" (UID: \"f2484eac-79e3-4477-a975-ed7dfdb72abd\") " pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.397468 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: 
\"kubernetes.io/secret/f2484eac-79e3-4477-a975-ed7dfdb72abd-v4-0-config-user-template-error\") pod \"oauth-openshift-554f5c8786-6wt4r\" (UID: \"f2484eac-79e3-4477-a975-ed7dfdb72abd\") " pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.397553 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f2484eac-79e3-4477-a975-ed7dfdb72abd-v4-0-config-system-serving-cert\") pod \"oauth-openshift-554f5c8786-6wt4r\" (UID: \"f2484eac-79e3-4477-a975-ed7dfdb72abd\") " pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.399385 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f2484eac-79e3-4477-a975-ed7dfdb72abd-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-554f5c8786-6wt4r\" (UID: \"f2484eac-79e3-4477-a975-ed7dfdb72abd\") " pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.399483 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f2484eac-79e3-4477-a975-ed7dfdb72abd-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-554f5c8786-6wt4r\" (UID: \"f2484eac-79e3-4477-a975-ed7dfdb72abd\") " pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.401562 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f2484eac-79e3-4477-a975-ed7dfdb72abd-v4-0-config-user-template-login\") pod \"oauth-openshift-554f5c8786-6wt4r\" (UID: \"f2484eac-79e3-4477-a975-ed7dfdb72abd\") " pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.401705 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f2484eac-79e3-4477-a975-ed7dfdb72abd-v4-0-config-system-router-certs\") pod \"oauth-openshift-554f5c8786-6wt4r\" (UID: \"f2484eac-79e3-4477-a975-ed7dfdb72abd\") " pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.404427 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f2484eac-79e3-4477-a975-ed7dfdb72abd-v4-0-config-system-session\") pod \"oauth-openshift-554f5c8786-6wt4r\" (UID: \"f2484eac-79e3-4477-a975-ed7dfdb72abd\") " pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.406020 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f2484eac-79e3-4477-a975-ed7dfdb72abd-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-554f5c8786-6wt4r\" (UID: \"f2484eac-79e3-4477-a975-ed7dfdb72abd\") " pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.406080 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: 
\"kubernetes.io/secret/f2484eac-79e3-4477-a975-ed7dfdb72abd-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-554f5c8786-6wt4r\" (UID: \"f2484eac-79e3-4477-a975-ed7dfdb72abd\") " pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.418731 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqw5z\" (UniqueName: \"kubernetes.io/projected/f2484eac-79e3-4477-a975-ed7dfdb72abd-kube-api-access-fqw5z\") pod \"oauth-openshift-554f5c8786-6wt4r\" (UID: \"f2484eac-79e3-4477-a975-ed7dfdb72abd\") " pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.515469 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.694130 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2991975-3b10-4f7d-aa48-750e3c402870" path="/var/lib/kubelet/pods/a2991975-3b10-4f7d-aa48-750e3c402870/volumes" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.738152 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-kqqpt_430635c4-315a-4af6-83d9-3e4c7407266d/registry-server/0.log" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.740398 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kqqpt" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.740649 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kqqpt" event={"ID":"430635c4-315a-4af6-83d9-3e4c7407266d","Type":"ContainerDied","Data":"a292ba03fa232640895072b9744e16154b79b5b5e6bb66b7f8b7551dd708ca6b"} Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.740700 4576 scope.go:117] "RemoveContainer" containerID="cf5f86175dcf96038f8e6f307e1bee0b2ad3ae96c0f4f10eff9cbc4650cc2adc" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.763594 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kqqpt"] Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.771875 4576 scope.go:117] "RemoveContainer" containerID="32da858c88dead014722089bda64f39517752e8dbd2acda26e51d728a55e4e78" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.775336 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-kqqpt"] Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.779181 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-554f5c8786-6wt4r"] Dec 03 08:43:55 crc kubenswrapper[4576]: W1203 08:43:55.783748 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf2484eac_79e3_4477_a975_ed7dfdb72abd.slice/crio-fd4a24c9971fd4ec023565b7dad94df3884e5baeb4b00eb3ff573b1d629b9f1b WatchSource:0}: Error finding container fd4a24c9971fd4ec023565b7dad94df3884e5baeb4b00eb3ff573b1d629b9f1b: Status 404 returned error can't find the container with id fd4a24c9971fd4ec023565b7dad94df3884e5baeb4b00eb3ff573b1d629b9f1b Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 08:43:55.793875 4576 scope.go:117] "RemoveContainer" containerID="b420d506ac1476af64f3d2f9932966f9f6d4854267101d67d6593bee8039fb5a" Dec 03 08:43:55 crc kubenswrapper[4576]: I1203 
08:43:55.967868 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-gmpkg" Dec 03 08:43:56 crc kubenswrapper[4576]: I1203 08:43:56.260907 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-rxb6h" Dec 03 08:43:56 crc kubenswrapper[4576]: I1203 08:43:56.701632 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hmmm5" Dec 03 08:43:56 crc kubenswrapper[4576]: I1203 08:43:56.759068 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" event={"ID":"f2484eac-79e3-4477-a975-ed7dfdb72abd","Type":"ContainerStarted","Data":"fd4a24c9971fd4ec023565b7dad94df3884e5baeb4b00eb3ff573b1d629b9f1b"} Dec 03 08:43:57 crc kubenswrapper[4576]: I1203 08:43:57.687941 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="430635c4-315a-4af6-83d9-3e4c7407266d" path="/var/lib/kubelet/pods/430635c4-315a-4af6-83d9-3e4c7407266d/volumes" Dec 03 08:43:57 crc kubenswrapper[4576]: I1203 08:43:57.767044 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" event={"ID":"f2484eac-79e3-4477-a975-ed7dfdb72abd","Type":"ContainerStarted","Data":"0ff26831dec227ccb5835e7257996334c6c3abb640010edecfd37fad9e225e09"} Dec 03 08:43:57 crc kubenswrapper[4576]: I1203 08:43:57.767573 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:57 crc kubenswrapper[4576]: I1203 08:43:57.776932 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" Dec 03 08:43:57 crc kubenswrapper[4576]: I1203 08:43:57.796293 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-554f5c8786-6wt4r" podStartSLOduration=32.796259208 podStartE2EDuration="32.796259208s" podCreationTimestamp="2025-12-03 08:43:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:43:57.79498395 +0000 UTC m=+245.180960944" watchObservedRunningTime="2025-12-03 08:43:57.796259208 +0000 UTC m=+245.182236232" Dec 03 08:43:58 crc kubenswrapper[4576]: I1203 08:43:58.321895 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hmmm5"] Dec 03 08:43:58 crc kubenswrapper[4576]: I1203 08:43:58.322237 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hmmm5" podUID="558ce81e-ebdc-4b7d-bd04-a3b485857825" containerName="registry-server" containerID="cri-o://cd1bf63624ead73f7e0c42788612f4c37b394db8dba9994b12523f1f59ed9c63" gracePeriod=2 Dec 03 08:43:58 crc kubenswrapper[4576]: I1203 08:43:58.703249 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hmmm5" Dec 03 08:43:58 crc kubenswrapper[4576]: I1203 08:43:58.772735 4576 generic.go:334] "Generic (PLEG): container finished" podID="558ce81e-ebdc-4b7d-bd04-a3b485857825" containerID="cd1bf63624ead73f7e0c42788612f4c37b394db8dba9994b12523f1f59ed9c63" exitCode=0 Dec 03 08:43:58 crc kubenswrapper[4576]: I1203 08:43:58.773337 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hmmm5" Dec 03 08:43:58 crc kubenswrapper[4576]: I1203 08:43:58.773743 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hmmm5" event={"ID":"558ce81e-ebdc-4b7d-bd04-a3b485857825","Type":"ContainerDied","Data":"cd1bf63624ead73f7e0c42788612f4c37b394db8dba9994b12523f1f59ed9c63"} Dec 03 08:43:58 crc kubenswrapper[4576]: I1203 08:43:58.774048 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hmmm5" event={"ID":"558ce81e-ebdc-4b7d-bd04-a3b485857825","Type":"ContainerDied","Data":"3ecdfa377a57f882efe8b716f120bff3376709ebe14fb7d0053405ad27e3b697"} Dec 03 08:43:58 crc kubenswrapper[4576]: I1203 08:43:58.774068 4576 scope.go:117] "RemoveContainer" containerID="cd1bf63624ead73f7e0c42788612f4c37b394db8dba9994b12523f1f59ed9c63" Dec 03 08:43:58 crc kubenswrapper[4576]: I1203 08:43:58.792774 4576 scope.go:117] "RemoveContainer" containerID="8defbe575c79dabe0d3e8bb69e6fddfcd881303238fb8165cbd9b99f8f73cd0d" Dec 03 08:43:58 crc kubenswrapper[4576]: I1203 08:43:58.806809 4576 scope.go:117] "RemoveContainer" containerID="4195812fd7361a87ded0de2f26e05f434457e5359039f2b6a765c4925d7649ec" Dec 03 08:43:58 crc kubenswrapper[4576]: I1203 08:43:58.828754 4576 scope.go:117] "RemoveContainer" containerID="cd1bf63624ead73f7e0c42788612f4c37b394db8dba9994b12523f1f59ed9c63" Dec 03 08:43:58 crc kubenswrapper[4576]: E1203 08:43:58.829036 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cd1bf63624ead73f7e0c42788612f4c37b394db8dba9994b12523f1f59ed9c63\": container with ID starting with cd1bf63624ead73f7e0c42788612f4c37b394db8dba9994b12523f1f59ed9c63 not found: ID does not exist" containerID="cd1bf63624ead73f7e0c42788612f4c37b394db8dba9994b12523f1f59ed9c63" Dec 03 08:43:58 crc kubenswrapper[4576]: I1203 08:43:58.829072 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd1bf63624ead73f7e0c42788612f4c37b394db8dba9994b12523f1f59ed9c63"} err="failed to get container status \"cd1bf63624ead73f7e0c42788612f4c37b394db8dba9994b12523f1f59ed9c63\": rpc error: code = NotFound desc = could not find container \"cd1bf63624ead73f7e0c42788612f4c37b394db8dba9994b12523f1f59ed9c63\": container with ID starting with cd1bf63624ead73f7e0c42788612f4c37b394db8dba9994b12523f1f59ed9c63 not found: ID does not exist" Dec 03 08:43:58 crc kubenswrapper[4576]: I1203 08:43:58.829094 4576 scope.go:117] "RemoveContainer" containerID="8defbe575c79dabe0d3e8bb69e6fddfcd881303238fb8165cbd9b99f8f73cd0d" Dec 03 08:43:58 crc kubenswrapper[4576]: E1203 08:43:58.829285 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8defbe575c79dabe0d3e8bb69e6fddfcd881303238fb8165cbd9b99f8f73cd0d\": container with ID starting with 8defbe575c79dabe0d3e8bb69e6fddfcd881303238fb8165cbd9b99f8f73cd0d not found: ID does not exist" containerID="8defbe575c79dabe0d3e8bb69e6fddfcd881303238fb8165cbd9b99f8f73cd0d" Dec 03 08:43:58 crc kubenswrapper[4576]: I1203 08:43:58.829301 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8defbe575c79dabe0d3e8bb69e6fddfcd881303238fb8165cbd9b99f8f73cd0d"} err="failed to get container status \"8defbe575c79dabe0d3e8bb69e6fddfcd881303238fb8165cbd9b99f8f73cd0d\": rpc error: code = NotFound desc = could not find container 
\"8defbe575c79dabe0d3e8bb69e6fddfcd881303238fb8165cbd9b99f8f73cd0d\": container with ID starting with 8defbe575c79dabe0d3e8bb69e6fddfcd881303238fb8165cbd9b99f8f73cd0d not found: ID does not exist" Dec 03 08:43:58 crc kubenswrapper[4576]: I1203 08:43:58.829314 4576 scope.go:117] "RemoveContainer" containerID="4195812fd7361a87ded0de2f26e05f434457e5359039f2b6a765c4925d7649ec" Dec 03 08:43:58 crc kubenswrapper[4576]: E1203 08:43:58.829479 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4195812fd7361a87ded0de2f26e05f434457e5359039f2b6a765c4925d7649ec\": container with ID starting with 4195812fd7361a87ded0de2f26e05f434457e5359039f2b6a765c4925d7649ec not found: ID does not exist" containerID="4195812fd7361a87ded0de2f26e05f434457e5359039f2b6a765c4925d7649ec" Dec 03 08:43:58 crc kubenswrapper[4576]: I1203 08:43:58.829493 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4195812fd7361a87ded0de2f26e05f434457e5359039f2b6a765c4925d7649ec"} err="failed to get container status \"4195812fd7361a87ded0de2f26e05f434457e5359039f2b6a765c4925d7649ec\": rpc error: code = NotFound desc = could not find container \"4195812fd7361a87ded0de2f26e05f434457e5359039f2b6a765c4925d7649ec\": container with ID starting with 4195812fd7361a87ded0de2f26e05f434457e5359039f2b6a765c4925d7649ec not found: ID does not exist" Dec 03 08:43:58 crc kubenswrapper[4576]: I1203 08:43:58.847410 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/558ce81e-ebdc-4b7d-bd04-a3b485857825-utilities\") pod \"558ce81e-ebdc-4b7d-bd04-a3b485857825\" (UID: \"558ce81e-ebdc-4b7d-bd04-a3b485857825\") " Dec 03 08:43:58 crc kubenswrapper[4576]: I1203 08:43:58.847560 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7njkb\" (UniqueName: \"kubernetes.io/projected/558ce81e-ebdc-4b7d-bd04-a3b485857825-kube-api-access-7njkb\") pod \"558ce81e-ebdc-4b7d-bd04-a3b485857825\" (UID: \"558ce81e-ebdc-4b7d-bd04-a3b485857825\") " Dec 03 08:43:58 crc kubenswrapper[4576]: I1203 08:43:58.847596 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/558ce81e-ebdc-4b7d-bd04-a3b485857825-catalog-content\") pod \"558ce81e-ebdc-4b7d-bd04-a3b485857825\" (UID: \"558ce81e-ebdc-4b7d-bd04-a3b485857825\") " Dec 03 08:43:58 crc kubenswrapper[4576]: I1203 08:43:58.848657 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/558ce81e-ebdc-4b7d-bd04-a3b485857825-utilities" (OuterVolumeSpecName: "utilities") pod "558ce81e-ebdc-4b7d-bd04-a3b485857825" (UID: "558ce81e-ebdc-4b7d-bd04-a3b485857825"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:43:58 crc kubenswrapper[4576]: I1203 08:43:58.853800 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/558ce81e-ebdc-4b7d-bd04-a3b485857825-kube-api-access-7njkb" (OuterVolumeSpecName: "kube-api-access-7njkb") pod "558ce81e-ebdc-4b7d-bd04-a3b485857825" (UID: "558ce81e-ebdc-4b7d-bd04-a3b485857825"). InnerVolumeSpecName "kube-api-access-7njkb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:43:58 crc kubenswrapper[4576]: I1203 08:43:58.899219 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/558ce81e-ebdc-4b7d-bd04-a3b485857825-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "558ce81e-ebdc-4b7d-bd04-a3b485857825" (UID: "558ce81e-ebdc-4b7d-bd04-a3b485857825"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:43:58 crc kubenswrapper[4576]: I1203 08:43:58.949009 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7njkb\" (UniqueName: \"kubernetes.io/projected/558ce81e-ebdc-4b7d-bd04-a3b485857825-kube-api-access-7njkb\") on node \"crc\" DevicePath \"\"" Dec 03 08:43:58 crc kubenswrapper[4576]: I1203 08:43:58.949042 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/558ce81e-ebdc-4b7d-bd04-a3b485857825-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 08:43:58 crc kubenswrapper[4576]: I1203 08:43:58.949056 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/558ce81e-ebdc-4b7d-bd04-a3b485857825-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 08:43:59 crc kubenswrapper[4576]: I1203 08:43:59.116044 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hmmm5"] Dec 03 08:43:59 crc kubenswrapper[4576]: I1203 08:43:59.119554 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hmmm5"] Dec 03 08:43:59 crc kubenswrapper[4576]: I1203 08:43:59.180739 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-2ngh2" Dec 03 08:43:59 crc kubenswrapper[4576]: I1203 08:43:59.215048 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-2ngh2" Dec 03 08:43:59 crc kubenswrapper[4576]: I1203 08:43:59.689758 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="558ce81e-ebdc-4b7d-bd04-a3b485857825" path="/var/lib/kubelet/pods/558ce81e-ebdc-4b7d-bd04-a3b485857825/volumes" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.644643 4576 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 03 08:44:01 crc kubenswrapper[4576]: E1203 08:44:01.645173 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="558ce81e-ebdc-4b7d-bd04-a3b485857825" containerName="extract-utilities" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.645188 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="558ce81e-ebdc-4b7d-bd04-a3b485857825" containerName="extract-utilities" Dec 03 08:44:01 crc kubenswrapper[4576]: E1203 08:44:01.645200 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="558ce81e-ebdc-4b7d-bd04-a3b485857825" containerName="registry-server" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.645210 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="558ce81e-ebdc-4b7d-bd04-a3b485857825" containerName="registry-server" Dec 03 08:44:01 crc kubenswrapper[4576]: E1203 08:44:01.645224 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="558ce81e-ebdc-4b7d-bd04-a3b485857825" containerName="extract-content" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.645232 4576 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="558ce81e-ebdc-4b7d-bd04-a3b485857825" containerName="extract-content" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.645388 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="558ce81e-ebdc-4b7d-bd04-a3b485857825" containerName="registry-server" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.645782 4576 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.646069 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e" gracePeriod=15 Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.646148 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.646274 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf" gracePeriod=15 Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.646324 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce" gracePeriod=15 Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.646356 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6" gracePeriod=15 Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.646385 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b" gracePeriod=15 Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.649448 4576 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 03 08:44:01 crc kubenswrapper[4576]: E1203 08:44:01.649727 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.649744 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 03 08:44:01 crc kubenswrapper[4576]: E1203 08:44:01.649756 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.649766 4576 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 03 08:44:01 crc kubenswrapper[4576]: E1203 08:44:01.649781 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.649789 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 03 08:44:01 crc kubenswrapper[4576]: E1203 08:44:01.649799 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.649808 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 03 08:44:01 crc kubenswrapper[4576]: E1203 08:44:01.649834 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.649842 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 03 08:44:01 crc kubenswrapper[4576]: E1203 08:44:01.649852 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.649860 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.649990 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.650004 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.650012 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.650026 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.650036 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.650048 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 03 08:44:01 crc kubenswrapper[4576]: E1203 08:44:01.650161 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.650171 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.736378 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] 
Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.791514 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.791605 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.791640 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.791684 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.791699 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.791723 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.791808 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.791866 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.803241 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 
08:44:01.805703 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.806429 4576 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf" exitCode=0 Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.806463 4576 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce" exitCode=0 Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.806472 4576 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6" exitCode=0 Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.806480 4576 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b" exitCode=2 Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.806547 4576 scope.go:117] "RemoveContainer" containerID="7e898c9e0d7c8192ab97aa18a8d9774bc1db0411f53aaa0f57a3aeb70e1e0706" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.892981 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.893036 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.893065 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.893097 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.893109 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.893132 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: 
\"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.893145 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.893164 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.893168 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.893191 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.893219 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.893234 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.893239 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.893259 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.893286 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.893284 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 08:44:01 crc kubenswrapper[4576]: I1203 08:44:01.997095 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 08:44:02 crc kubenswrapper[4576]: E1203 08:44:02.032681 4576 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.129.56.136:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187da816232ba300 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-03 08:44:02.03191168 +0000 UTC m=+249.417888664,LastTimestamp:2025-12-03 08:44:02.03191168 +0000 UTC m=+249.417888664,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 03 08:44:02 crc kubenswrapper[4576]: I1203 08:44:02.814713 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"0ef7bd27e110af398acda212a62fc55c165031b2586405f6bb869e451370fec6"} Dec 03 08:44:02 crc kubenswrapper[4576]: I1203 08:44:02.814797 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"41edfde2e3ff529be6052cf5901da4344e68bb05dfb3fa1116252fa60c3af0e8"} Dec 03 08:44:02 crc kubenswrapper[4576]: I1203 08:44:02.815769 4576 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.136:6443: connect: connection refused" Dec 03 08:44:02 crc kubenswrapper[4576]: I1203 08:44:02.819748 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 03 08:44:02 crc kubenswrapper[4576]: I1203 08:44:02.822910 4576 generic.go:334] "Generic (PLEG): container finished" podID="8966b99c-3a63-452f-9150-dd0548ecd89a" containerID="c272629df5972b840b34cb30ec78934ee0de9bc8b75ad2e3aaaf0f51676d940a" exitCode=0 Dec 03 08:44:02 crc kubenswrapper[4576]: I1203 08:44:02.822964 4576 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"8966b99c-3a63-452f-9150-dd0548ecd89a","Type":"ContainerDied","Data":"c272629df5972b840b34cb30ec78934ee0de9bc8b75ad2e3aaaf0f51676d940a"} Dec 03 08:44:02 crc kubenswrapper[4576]: I1203 08:44:02.823986 4576 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.136:6443: connect: connection refused" Dec 03 08:44:02 crc kubenswrapper[4576]: I1203 08:44:02.824631 4576 status_manager.go:851] "Failed to get status for pod" podUID="8966b99c-3a63-452f-9150-dd0548ecd89a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.136:6443: connect: connection refused" Dec 03 08:44:03 crc kubenswrapper[4576]: I1203 08:44:03.680268 4576 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.136:6443: connect: connection refused" Dec 03 08:44:03 crc kubenswrapper[4576]: I1203 08:44:03.680889 4576 status_manager.go:851] "Failed to get status for pod" podUID="8966b99c-3a63-452f-9150-dd0548ecd89a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.136:6443: connect: connection refused" Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.141734 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.143076 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.144463 4576 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.136:6443: connect: connection refused" Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.145072 4576 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.136:6443: connect: connection refused" Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.145595 4576 status_manager.go:851] "Failed to get status for pod" podUID="8966b99c-3a63-452f-9150-dd0548ecd89a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.136:6443: connect: connection refused" Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.152333 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.152991 4576 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.136:6443: connect: connection refused" Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.153434 4576 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.136:6443: connect: connection refused" Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.153839 4576 status_manager.go:851] "Failed to get status for pod" podUID="8966b99c-3a63-452f-9150-dd0548ecd89a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.136:6443: connect: connection refused" Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.268490 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.268555 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/8966b99c-3a63-452f-9150-dd0548ecd89a-var-lock\") pod \"8966b99c-3a63-452f-9150-dd0548ecd89a\" (UID: \"8966b99c-3a63-452f-9150-dd0548ecd89a\") " Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.268581 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.268601 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.268667 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8966b99c-3a63-452f-9150-dd0548ecd89a-kube-api-access\") pod \"8966b99c-3a63-452f-9150-dd0548ecd89a\" (UID: \"8966b99c-3a63-452f-9150-dd0548ecd89a\") " Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.268682 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8966b99c-3a63-452f-9150-dd0548ecd89a-kubelet-dir\") pod \"8966b99c-3a63-452f-9150-dd0548ecd89a\" (UID: \"8966b99c-3a63-452f-9150-dd0548ecd89a\") " Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.268683 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8966b99c-3a63-452f-9150-dd0548ecd89a-var-lock" (OuterVolumeSpecName: "var-lock") pod "8966b99c-3a63-452f-9150-dd0548ecd89a" (UID: "8966b99c-3a63-452f-9150-dd0548ecd89a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.268747 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.268700 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.268856 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8966b99c-3a63-452f-9150-dd0548ecd89a-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "8966b99c-3a63-452f-9150-dd0548ecd89a" (UID: "8966b99c-3a63-452f-9150-dd0548ecd89a"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.268820 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.269173 4576 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/8966b99c-3a63-452f-9150-dd0548ecd89a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.269226 4576 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.269242 4576 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.269258 4576 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8966b99c-3a63-452f-9150-dd0548ecd89a-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.269276 4576 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.274461 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8966b99c-3a63-452f-9150-dd0548ecd89a-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "8966b99c-3a63-452f-9150-dd0548ecd89a" (UID: "8966b99c-3a63-452f-9150-dd0548ecd89a"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.370972 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8966b99c-3a63-452f-9150-dd0548ecd89a-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.896454 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.898647 4576 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e" exitCode=0 Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.898793 4576 scope.go:117] "RemoveContainer" containerID="d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf" Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.898792 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.901155 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"8966b99c-3a63-452f-9150-dd0548ecd89a","Type":"ContainerDied","Data":"734fbc47950b0a2096160c2ea264b83c9234815b0eac488e7b6e859ebb510a80"} Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.901222 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="734fbc47950b0a2096160c2ea264b83c9234815b0eac488e7b6e859ebb510a80" Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.901296 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.918207 4576 status_manager.go:851] "Failed to get status for pod" podUID="8966b99c-3a63-452f-9150-dd0548ecd89a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.136:6443: connect: connection refused" Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.919090 4576 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.136:6443: connect: connection refused" Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.919428 4576 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.136:6443: connect: connection refused" Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.923460 4576 scope.go:117] "RemoveContainer" containerID="674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce" Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.931908 4576 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.136:6443: connect: connection refused" Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.932492 4576 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.136:6443: connect: connection refused" Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.933402 4576 status_manager.go:851] "Failed to get status for pod" podUID="8966b99c-3a63-452f-9150-dd0548ecd89a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.136:6443: connect: connection refused" Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.940412 4576 scope.go:117] "RemoveContainer" containerID="49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6" Dec 03 08:44:04 crc 
kubenswrapper[4576]: I1203 08:44:04.955153 4576 scope.go:117] "RemoveContainer" containerID="7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b" Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.979093 4576 scope.go:117] "RemoveContainer" containerID="0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e" Dec 03 08:44:04 crc kubenswrapper[4576]: I1203 08:44:04.999299 4576 scope.go:117] "RemoveContainer" containerID="91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16" Dec 03 08:44:05 crc kubenswrapper[4576]: I1203 08:44:05.026240 4576 scope.go:117] "RemoveContainer" containerID="d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf" Dec 03 08:44:05 crc kubenswrapper[4576]: E1203 08:44:05.029066 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\": container with ID starting with d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf not found: ID does not exist" containerID="d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf" Dec 03 08:44:05 crc kubenswrapper[4576]: I1203 08:44:05.029131 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf"} err="failed to get container status \"d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\": rpc error: code = NotFound desc = could not find container \"d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf\": container with ID starting with d03cbab5a546a170575cec56a394eb685e0fc9389bd2c0a90357dfbd0a03c4cf not found: ID does not exist" Dec 03 08:44:05 crc kubenswrapper[4576]: I1203 08:44:05.029190 4576 scope.go:117] "RemoveContainer" containerID="674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce" Dec 03 08:44:05 crc kubenswrapper[4576]: E1203 08:44:05.029911 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\": container with ID starting with 674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce not found: ID does not exist" containerID="674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce" Dec 03 08:44:05 crc kubenswrapper[4576]: I1203 08:44:05.030001 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce"} err="failed to get container status \"674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\": rpc error: code = NotFound desc = could not find container \"674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce\": container with ID starting with 674bf025d7a2aac2e625c6c70d8e6ca62aa14c5b6db62d087e875fad469218ce not found: ID does not exist" Dec 03 08:44:05 crc kubenswrapper[4576]: I1203 08:44:05.030032 4576 scope.go:117] "RemoveContainer" containerID="49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6" Dec 03 08:44:05 crc kubenswrapper[4576]: E1203 08:44:05.030801 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\": container with ID starting with 49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6 not found: ID does not 
exist" containerID="49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6" Dec 03 08:44:05 crc kubenswrapper[4576]: I1203 08:44:05.030850 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6"} err="failed to get container status \"49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\": rpc error: code = NotFound desc = could not find container \"49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6\": container with ID starting with 49275a1fb6db38574bf885de86b201968b36ce6035f82df2c0ef616a90b5dad6 not found: ID does not exist" Dec 03 08:44:05 crc kubenswrapper[4576]: I1203 08:44:05.030883 4576 scope.go:117] "RemoveContainer" containerID="7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b" Dec 03 08:44:05 crc kubenswrapper[4576]: E1203 08:44:05.031313 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\": container with ID starting with 7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b not found: ID does not exist" containerID="7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b" Dec 03 08:44:05 crc kubenswrapper[4576]: I1203 08:44:05.031345 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b"} err="failed to get container status \"7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\": rpc error: code = NotFound desc = could not find container \"7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b\": container with ID starting with 7acf558dde45b063042e73cafea7cd44f9413440aaf2befab90b5dd695bd625b not found: ID does not exist" Dec 03 08:44:05 crc kubenswrapper[4576]: I1203 08:44:05.031359 4576 scope.go:117] "RemoveContainer" containerID="0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e" Dec 03 08:44:05 crc kubenswrapper[4576]: E1203 08:44:05.031925 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\": container with ID starting with 0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e not found: ID does not exist" containerID="0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e" Dec 03 08:44:05 crc kubenswrapper[4576]: I1203 08:44:05.031992 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e"} err="failed to get container status \"0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\": rpc error: code = NotFound desc = could not find container \"0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e\": container with ID starting with 0eb0e697ea80c3b6357dd95c630d2bc6a97d4a027bd0c03c7602dd3d1714c17e not found: ID does not exist" Dec 03 08:44:05 crc kubenswrapper[4576]: I1203 08:44:05.032033 4576 scope.go:117] "RemoveContainer" containerID="91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16" Dec 03 08:44:05 crc kubenswrapper[4576]: E1203 08:44:05.032807 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\": container with ID starting with 91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16 not found: ID does not exist" containerID="91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16" Dec 03 08:44:05 crc kubenswrapper[4576]: I1203 08:44:05.032847 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16"} err="failed to get container status \"91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\": rpc error: code = NotFound desc = could not find container \"91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16\": container with ID starting with 91f07f80c06020f5bb45b1e01c583ed7b95cbb11c230a999197b0c0dd58dec16 not found: ID does not exist" Dec 03 08:44:05 crc kubenswrapper[4576]: E1203 08:44:05.078474 4576 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.136:6443: connect: connection refused" Dec 03 08:44:05 crc kubenswrapper[4576]: E1203 08:44:05.079945 4576 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.136:6443: connect: connection refused" Dec 03 08:44:05 crc kubenswrapper[4576]: E1203 08:44:05.080397 4576 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.136:6443: connect: connection refused" Dec 03 08:44:05 crc kubenswrapper[4576]: E1203 08:44:05.081114 4576 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.136:6443: connect: connection refused" Dec 03 08:44:05 crc kubenswrapper[4576]: E1203 08:44:05.081739 4576 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.136:6443: connect: connection refused" Dec 03 08:44:05 crc kubenswrapper[4576]: I1203 08:44:05.081870 4576 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Dec 03 08:44:05 crc kubenswrapper[4576]: E1203 08:44:05.082355 4576 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.136:6443: connect: connection refused" interval="200ms" Dec 03 08:44:05 crc kubenswrapper[4576]: E1203 08:44:05.283471 4576 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.136:6443: connect: connection refused" interval="400ms" Dec 03 08:44:05 crc kubenswrapper[4576]: E1203 08:44:05.684511 4576 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.136:6443: connect: connection refused" interval="800ms" Dec 03 08:44:05 crc 
kubenswrapper[4576]: I1203 08:44:05.685652 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 03 08:44:06 crc kubenswrapper[4576]: E1203 08:44:06.486118 4576 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.136:6443: connect: connection refused" interval="1.6s" Dec 03 08:44:08 crc kubenswrapper[4576]: E1203 08:44:08.087022 4576 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.136:6443: connect: connection refused" interval="3.2s" Dec 03 08:44:11 crc kubenswrapper[4576]: E1203 08:44:11.182032 4576 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.129.56.136:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187da816232ba300 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-03 08:44:02.03191168 +0000 UTC m=+249.417888664,LastTimestamp:2025-12-03 08:44:02.03191168 +0000 UTC m=+249.417888664,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 03 08:44:11 crc kubenswrapper[4576]: E1203 08:44:11.289081 4576 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.136:6443: connect: connection refused" interval="6.4s" Dec 03 08:44:13 crc kubenswrapper[4576]: I1203 08:44:13.676405 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 08:44:13 crc kubenswrapper[4576]: I1203 08:44:13.683602 4576 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.136:6443: connect: connection refused" Dec 03 08:44:13 crc kubenswrapper[4576]: I1203 08:44:13.684349 4576 status_manager.go:851] "Failed to get status for pod" podUID="8966b99c-3a63-452f-9150-dd0548ecd89a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.136:6443: connect: connection refused" Dec 03 08:44:13 crc kubenswrapper[4576]: I1203 08:44:13.684905 4576 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.136:6443: connect: connection refused" Dec 03 08:44:13 crc kubenswrapper[4576]: I1203 08:44:13.685110 4576 status_manager.go:851] "Failed to get status for pod" podUID="8966b99c-3a63-452f-9150-dd0548ecd89a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.136:6443: connect: connection refused" Dec 03 08:44:13 crc kubenswrapper[4576]: I1203 08:44:13.694608 4576 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e5926f9c-3fa0-4df6-93a7-70f581151e0e" Dec 03 08:44:13 crc kubenswrapper[4576]: I1203 08:44:13.694650 4576 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e5926f9c-3fa0-4df6-93a7-70f581151e0e" Dec 03 08:44:13 crc kubenswrapper[4576]: E1203 08:44:13.694972 4576 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.136:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 08:44:13 crc kubenswrapper[4576]: I1203 08:44:13.695489 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 08:44:13 crc kubenswrapper[4576]: I1203 08:44:13.966666 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"894c535af71fbe86bbc3ba47f86bf6c500001662e5c8f82be760d441ee50c0d3"} Dec 03 08:44:13 crc kubenswrapper[4576]: I1203 08:44:13.967047 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"f7a6012a0d72ade2d4aa29ec1ce288932f5f7fb55590dd4f93736d59ae6009c8"} Dec 03 08:44:13 crc kubenswrapper[4576]: I1203 08:44:13.967456 4576 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e5926f9c-3fa0-4df6-93a7-70f581151e0e" Dec 03 08:44:13 crc kubenswrapper[4576]: I1203 08:44:13.967478 4576 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e5926f9c-3fa0-4df6-93a7-70f581151e0e" Dec 03 08:44:13 crc kubenswrapper[4576]: E1203 08:44:13.967996 4576 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.136:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 08:44:13 crc kubenswrapper[4576]: I1203 08:44:13.968101 4576 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.136:6443: connect: connection refused" Dec 03 08:44:13 crc kubenswrapper[4576]: I1203 08:44:13.969276 4576 status_manager.go:851] "Failed to get status for pod" podUID="8966b99c-3a63-452f-9150-dd0548ecd89a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.136:6443: connect: connection refused" Dec 03 08:44:14 crc kubenswrapper[4576]: E1203 08:44:14.749144 4576 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.129.56.136:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" volumeName="registry-storage" Dec 03 08:44:14 crc kubenswrapper[4576]: I1203 08:44:14.978397 4576 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="894c535af71fbe86bbc3ba47f86bf6c500001662e5c8f82be760d441ee50c0d3" exitCode=0 Dec 03 08:44:14 crc kubenswrapper[4576]: I1203 08:44:14.978482 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"894c535af71fbe86bbc3ba47f86bf6c500001662e5c8f82be760d441ee50c0d3"} Dec 03 08:44:14 crc kubenswrapper[4576]: I1203 08:44:14.979168 4576 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
podUID="e5926f9c-3fa0-4df6-93a7-70f581151e0e" Dec 03 08:44:14 crc kubenswrapper[4576]: I1203 08:44:14.979285 4576 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e5926f9c-3fa0-4df6-93a7-70f581151e0e" Dec 03 08:44:14 crc kubenswrapper[4576]: I1203 08:44:14.979810 4576 status_manager.go:851] "Failed to get status for pod" podUID="8966b99c-3a63-452f-9150-dd0548ecd89a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.136:6443: connect: connection refused" Dec 03 08:44:14 crc kubenswrapper[4576]: E1203 08:44:14.980046 4576 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.136:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 08:44:14 crc kubenswrapper[4576]: I1203 08:44:14.980396 4576 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.136:6443: connect: connection refused" Dec 03 08:44:14 crc kubenswrapper[4576]: I1203 08:44:14.984048 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 03 08:44:14 crc kubenswrapper[4576]: I1203 08:44:14.984124 4576 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb" exitCode=1 Dec 03 08:44:14 crc kubenswrapper[4576]: I1203 08:44:14.984170 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb"} Dec 03 08:44:14 crc kubenswrapper[4576]: I1203 08:44:14.984987 4576 scope.go:117] "RemoveContainer" containerID="994e53c2ec028aead6edf0c68233ae047ed51f178f5befddb93916a32781c3eb" Dec 03 08:44:14 crc kubenswrapper[4576]: I1203 08:44:14.985937 4576 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.136:6443: connect: connection refused" Dec 03 08:44:14 crc kubenswrapper[4576]: I1203 08:44:14.986484 4576 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.136:6443: connect: connection refused" Dec 03 08:44:14 crc kubenswrapper[4576]: I1203 08:44:14.987013 4576 status_manager.go:851] "Failed to get status for pod" podUID="8966b99c-3a63-452f-9150-dd0548ecd89a" pod="openshift-kube-apiserver/installer-9-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.136:6443: connect: connection refused" Dec 03 08:44:15 crc kubenswrapper[4576]: I1203 08:44:15.998925 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 03 08:44:15 crc kubenswrapper[4576]: I1203 08:44:15.999387 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"37392a9e938d37832f30fb5064b99bedd828001e3d78dbe47db64b9d9c57cc7d"} Dec 03 08:44:16 crc kubenswrapper[4576]: I1203 08:44:16.017992 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"827539e81762474d4c605d52848521fbfd37640202533e3a75d445744e77fb86"} Dec 03 08:44:16 crc kubenswrapper[4576]: I1203 08:44:16.018052 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"bebf232e918f9c737a675a034d4cd7b3ea4f1bcbd5d6c3d4baf34f2d7f52714c"} Dec 03 08:44:16 crc kubenswrapper[4576]: I1203 08:44:16.018065 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"fd7851450c22ccf3e0ed3e0d01bdc8f54b6687aec0af8c49718aa76cbdc8367c"} Dec 03 08:44:16 crc kubenswrapper[4576]: I1203 08:44:16.018077 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"f36df080f3ca272f6905f5d9023a026abd9f31e58029cbb25a1673bb9934b4a8"} Dec 03 08:44:16 crc kubenswrapper[4576]: I1203 08:44:16.850502 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 08:44:16 crc kubenswrapper[4576]: I1203 08:44:16.855393 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 08:44:17 crc kubenswrapper[4576]: I1203 08:44:17.026434 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"ef1612e3382540a589e7f62a8f8dbd6577c28e440c0bc71eff9a26af285d867d"} Dec 03 08:44:17 crc kubenswrapper[4576]: I1203 08:44:17.026780 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 08:44:17 crc kubenswrapper[4576]: I1203 08:44:17.026824 4576 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e5926f9c-3fa0-4df6-93a7-70f581151e0e" Dec 03 08:44:17 crc kubenswrapper[4576]: I1203 08:44:17.026849 4576 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e5926f9c-3fa0-4df6-93a7-70f581151e0e" Dec 03 08:44:18 crc kubenswrapper[4576]: I1203 08:44:18.696159 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 
08:44:18 crc kubenswrapper[4576]: I1203 08:44:18.696269 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 08:44:18 crc kubenswrapper[4576]: I1203 08:44:18.702172 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 08:44:22 crc kubenswrapper[4576]: I1203 08:44:22.036359 4576 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 08:44:23 crc kubenswrapper[4576]: I1203 08:44:23.059783 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 08:44:23 crc kubenswrapper[4576]: I1203 08:44:23.059892 4576 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e5926f9c-3fa0-4df6-93a7-70f581151e0e" Dec 03 08:44:23 crc kubenswrapper[4576]: I1203 08:44:23.060123 4576 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e5926f9c-3fa0-4df6-93a7-70f581151e0e" Dec 03 08:44:23 crc kubenswrapper[4576]: I1203 08:44:23.067000 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 08:44:23 crc kubenswrapper[4576]: I1203 08:44:23.701346 4576 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="16d4cb3e-cb0f-4a1e-8598-830d0000c1d7" Dec 03 08:44:24 crc kubenswrapper[4576]: I1203 08:44:24.064437 4576 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e5926f9c-3fa0-4df6-93a7-70f581151e0e" Dec 03 08:44:24 crc kubenswrapper[4576]: I1203 08:44:24.064476 4576 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e5926f9c-3fa0-4df6-93a7-70f581151e0e" Dec 03 08:44:24 crc kubenswrapper[4576]: I1203 08:44:24.067964 4576 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="16d4cb3e-cb0f-4a1e-8598-830d0000c1d7" Dec 03 08:44:24 crc kubenswrapper[4576]: I1203 08:44:24.971923 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:44:24 crc kubenswrapper[4576]: I1203 08:44:24.972043 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:44:24 crc kubenswrapper[4576]: I1203 08:44:24.972092 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:44:24 crc kubenswrapper[4576]: I1203 08:44:24.972154 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:44:24 crc kubenswrapper[4576]: I1203 08:44:24.974680 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 03 08:44:24 crc kubenswrapper[4576]: I1203 08:44:24.974860 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 03 08:44:24 crc kubenswrapper[4576]: I1203 08:44:24.974907 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 03 08:44:24 crc kubenswrapper[4576]: I1203 08:44:24.983474 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:44:24 crc kubenswrapper[4576]: I1203 08:44:24.984106 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 03 08:44:24 crc kubenswrapper[4576]: I1203 08:44:24.991291 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:44:24 crc kubenswrapper[4576]: I1203 08:44:24.996401 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:44:24 crc kubenswrapper[4576]: I1203 08:44:24.997595 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:44:25 crc kubenswrapper[4576]: I1203 08:44:25.029652 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:44:25 crc kubenswrapper[4576]: I1203 08:44:25.039763 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 08:44:25 crc kubenswrapper[4576]: I1203 08:44:25.053843 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 08:44:25 crc kubenswrapper[4576]: I1203 08:44:25.073430 4576 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e5926f9c-3fa0-4df6-93a7-70f581151e0e" Dec 03 08:44:25 crc kubenswrapper[4576]: I1203 08:44:25.073465 4576 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e5926f9c-3fa0-4df6-93a7-70f581151e0e" Dec 03 08:44:25 crc kubenswrapper[4576]: I1203 08:44:25.080497 4576 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="16d4cb3e-cb0f-4a1e-8598-830d0000c1d7" Dec 03 08:44:25 crc kubenswrapper[4576]: W1203 08:44:25.580487 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b6479f0_333b_4a96_9adf_2099afdc2447.slice/crio-86c53fda31b25997468728ac2fc089927d5a58424537c6783d82a42f9cffecbe WatchSource:0}: Error finding container 86c53fda31b25997468728ac2fc089927d5a58424537c6783d82a42f9cffecbe: Status 404 returned error can't find the container with id 86c53fda31b25997468728ac2fc089927d5a58424537c6783d82a42f9cffecbe Dec 03 08:44:26 crc kubenswrapper[4576]: I1203 08:44:26.081101 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"1bc447f1bbb35215e71c1b51a4666a525ba220797f02a5650a8a44059fb8095a"} Dec 03 08:44:26 crc kubenswrapper[4576]: I1203 08:44:26.081218 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"7aa741ad7069fdeb73322ce141f45a03c66e800cd95f7a2db9757fe476cf9b6e"} Dec 03 08:44:26 crc kubenswrapper[4576]: I1203 08:44:26.084447 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"4336b717022e5df155e275960c4f76fd9a8e735b22be83f0a90fc875c24b868b"} Dec 03 08:44:26 crc kubenswrapper[4576]: I1203 08:44:26.084903 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"d294c5ff55f83c6d31dc60b5356468ce3cbad6a5faf370a9dce67404002fc246"} Dec 03 08:44:26 crc kubenswrapper[4576]: I1203 08:44:26.086264 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"f38505c53e96bffd8f5276ce0db426db9a2c6f3168034d5b60b59e6f483c421d"} Dec 03 08:44:26 crc kubenswrapper[4576]: I1203 08:44:26.086339 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"86c53fda31b25997468728ac2fc089927d5a58424537c6783d82a42f9cffecbe"} Dec 03 08:44:26 crc kubenswrapper[4576]: I1203 08:44:26.087202 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:44:27 crc kubenswrapper[4576]: I1203 08:44:27.092141 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-network-diagnostics_network-check-source-55646444c4-trplf_9d751cbb-f2e2-430d-9754-c882a5e924a5/check-endpoints/0.log" Dec 03 08:44:27 crc kubenswrapper[4576]: I1203 08:44:27.092182 4576 generic.go:334] "Generic (PLEG): container finished" podID="9d751cbb-f2e2-430d-9754-c882a5e924a5" containerID="1bc447f1bbb35215e71c1b51a4666a525ba220797f02a5650a8a44059fb8095a" exitCode=255 Dec 03 08:44:27 crc kubenswrapper[4576]: I1203 08:44:27.092782 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerDied","Data":"1bc447f1bbb35215e71c1b51a4666a525ba220797f02a5650a8a44059fb8095a"} Dec 03 08:44:27 crc kubenswrapper[4576]: I1203 08:44:27.092977 4576 scope.go:117] "RemoveContainer" containerID="1bc447f1bbb35215e71c1b51a4666a525ba220797f02a5650a8a44059fb8095a" Dec 03 08:44:28 crc kubenswrapper[4576]: I1203 08:44:28.109443 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-network-diagnostics_network-check-source-55646444c4-trplf_9d751cbb-f2e2-430d-9754-c882a5e924a5/check-endpoints/1.log" Dec 03 08:44:28 crc kubenswrapper[4576]: I1203 08:44:28.110261 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-network-diagnostics_network-check-source-55646444c4-trplf_9d751cbb-f2e2-430d-9754-c882a5e924a5/check-endpoints/0.log" Dec 03 08:44:28 crc kubenswrapper[4576]: I1203 08:44:28.110308 4576 generic.go:334] "Generic (PLEG): container finished" podID="9d751cbb-f2e2-430d-9754-c882a5e924a5" containerID="d7847b653a39977814bd2a743b122f47af0a3dca0400ce4b310e199b1a355e33" exitCode=255 Dec 03 08:44:28 crc kubenswrapper[4576]: I1203 08:44:28.110345 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerDied","Data":"d7847b653a39977814bd2a743b122f47af0a3dca0400ce4b310e199b1a355e33"} Dec 03 08:44:28 crc kubenswrapper[4576]: I1203 08:44:28.110381 4576 scope.go:117] "RemoveContainer" containerID="1bc447f1bbb35215e71c1b51a4666a525ba220797f02a5650a8a44059fb8095a" Dec 03 08:44:28 crc kubenswrapper[4576]: I1203 08:44:28.111313 4576 scope.go:117] "RemoveContainer" containerID="d7847b653a39977814bd2a743b122f47af0a3dca0400ce4b310e199b1a355e33" Dec 03 08:44:28 crc kubenswrapper[4576]: E1203 08:44:28.116984 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=check-endpoints pod=network-check-source-55646444c4-trplf_openshift-network-diagnostics(9d751cbb-f2e2-430d-9754-c882a5e924a5)\"" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:44:29 crc kubenswrapper[4576]: I1203 08:44:29.098782 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 08:44:29 crc kubenswrapper[4576]: I1203 08:44:29.122983 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-network-diagnostics_network-check-source-55646444c4-trplf_9d751cbb-f2e2-430d-9754-c882a5e924a5/check-endpoints/1.log" Dec 03 08:44:32 crc 
kubenswrapper[4576]: I1203 08:44:32.107603 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 03 08:44:32 crc kubenswrapper[4576]: I1203 08:44:32.387758 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 03 08:44:32 crc kubenswrapper[4576]: I1203 08:44:32.659516 4576 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 03 08:44:33 crc kubenswrapper[4576]: I1203 08:44:33.071639 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 03 08:44:33 crc kubenswrapper[4576]: I1203 08:44:33.252720 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 03 08:44:33 crc kubenswrapper[4576]: I1203 08:44:33.432987 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 03 08:44:33 crc kubenswrapper[4576]: I1203 08:44:33.433896 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 03 08:44:33 crc kubenswrapper[4576]: I1203 08:44:33.532053 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 03 08:44:33 crc kubenswrapper[4576]: I1203 08:44:33.839297 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 03 08:44:34 crc kubenswrapper[4576]: I1203 08:44:34.084185 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 03 08:44:34 crc kubenswrapper[4576]: I1203 08:44:34.393420 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 03 08:44:34 crc kubenswrapper[4576]: I1203 08:44:34.451965 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 03 08:44:34 crc kubenswrapper[4576]: I1203 08:44:34.452655 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 03 08:44:34 crc kubenswrapper[4576]: I1203 08:44:34.548493 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 03 08:44:34 crc kubenswrapper[4576]: I1203 08:44:34.669339 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 03 08:44:34 crc kubenswrapper[4576]: I1203 08:44:34.921837 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 03 08:44:35 crc kubenswrapper[4576]: I1203 08:44:35.058744 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 03 08:44:35 crc kubenswrapper[4576]: I1203 08:44:35.171179 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 03 08:44:35 crc kubenswrapper[4576]: I1203 08:44:35.428782 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 03 08:44:35 
crc kubenswrapper[4576]: I1203 08:44:35.670975 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 03 08:44:35 crc kubenswrapper[4576]: I1203 08:44:35.687592 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 03 08:44:35 crc kubenswrapper[4576]: I1203 08:44:35.866128 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 03 08:44:36 crc kubenswrapper[4576]: I1203 08:44:36.083188 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 03 08:44:36 crc kubenswrapper[4576]: I1203 08:44:36.196357 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 03 08:44:36 crc kubenswrapper[4576]: I1203 08:44:36.223506 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 03 08:44:36 crc kubenswrapper[4576]: I1203 08:44:36.233021 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 03 08:44:36 crc kubenswrapper[4576]: I1203 08:44:36.342600 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 03 08:44:36 crc kubenswrapper[4576]: I1203 08:44:36.373747 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 03 08:44:36 crc kubenswrapper[4576]: I1203 08:44:36.520561 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 03 08:44:36 crc kubenswrapper[4576]: I1203 08:44:36.654499 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 03 08:44:36 crc kubenswrapper[4576]: I1203 08:44:36.728763 4576 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 03 08:44:36 crc kubenswrapper[4576]: I1203 08:44:36.731540 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=35.731462119 podStartE2EDuration="35.731462119s" podCreationTimestamp="2025-12-03 08:44:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:44:21.860336005 +0000 UTC m=+269.246312989" watchObservedRunningTime="2025-12-03 08:44:36.731462119 +0000 UTC m=+284.117439103" Dec 03 08:44:36 crc kubenswrapper[4576]: I1203 08:44:36.736582 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 03 08:44:36 crc kubenswrapper[4576]: I1203 08:44:36.736655 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 03 08:44:36 crc kubenswrapper[4576]: I1203 08:44:36.741301 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 08:44:36 crc kubenswrapper[4576]: I1203 08:44:36.795919 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=14.795886904 
podStartE2EDuration="14.795886904s" podCreationTimestamp="2025-12-03 08:44:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:44:36.76634735 +0000 UTC m=+284.152324354" watchObservedRunningTime="2025-12-03 08:44:36.795886904 +0000 UTC m=+284.181863888" Dec 03 08:44:36 crc kubenswrapper[4576]: I1203 08:44:36.900198 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 03 08:44:36 crc kubenswrapper[4576]: I1203 08:44:36.951766 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 03 08:44:36 crc kubenswrapper[4576]: I1203 08:44:36.989733 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 03 08:44:36 crc kubenswrapper[4576]: I1203 08:44:36.993648 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 03 08:44:37 crc kubenswrapper[4576]: I1203 08:44:37.046390 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 03 08:44:37 crc kubenswrapper[4576]: I1203 08:44:37.135924 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 03 08:44:37 crc kubenswrapper[4576]: I1203 08:44:37.232954 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 03 08:44:37 crc kubenswrapper[4576]: I1203 08:44:37.326768 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 03 08:44:37 crc kubenswrapper[4576]: I1203 08:44:37.415107 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 03 08:44:37 crc kubenswrapper[4576]: I1203 08:44:37.453938 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 03 08:44:37 crc kubenswrapper[4576]: I1203 08:44:37.586617 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 03 08:44:37 crc kubenswrapper[4576]: I1203 08:44:37.716271 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 03 08:44:37 crc kubenswrapper[4576]: I1203 08:44:37.721603 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 03 08:44:37 crc kubenswrapper[4576]: I1203 08:44:37.738700 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 03 08:44:37 crc kubenswrapper[4576]: I1203 08:44:37.741516 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 03 08:44:37 crc kubenswrapper[4576]: I1203 08:44:37.755436 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 03 08:44:37 crc kubenswrapper[4576]: I1203 08:44:37.765684 4576 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-config-operator"/"kube-root-ca.crt" Dec 03 08:44:37 crc kubenswrapper[4576]: I1203 08:44:37.859184 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 03 08:44:37 crc kubenswrapper[4576]: I1203 08:44:37.892430 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 03 08:44:37 crc kubenswrapper[4576]: I1203 08:44:37.977184 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 03 08:44:37 crc kubenswrapper[4576]: I1203 08:44:37.999913 4576 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 03 08:44:38 crc kubenswrapper[4576]: I1203 08:44:38.014094 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 03 08:44:38 crc kubenswrapper[4576]: I1203 08:44:38.018453 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 03 08:44:38 crc kubenswrapper[4576]: I1203 08:44:38.097514 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 03 08:44:38 crc kubenswrapper[4576]: I1203 08:44:38.127587 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 03 08:44:38 crc kubenswrapper[4576]: I1203 08:44:38.216587 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 03 08:44:38 crc kubenswrapper[4576]: I1203 08:44:38.288351 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 03 08:44:38 crc kubenswrapper[4576]: I1203 08:44:38.346067 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 03 08:44:38 crc kubenswrapper[4576]: I1203 08:44:38.437063 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 03 08:44:38 crc kubenswrapper[4576]: I1203 08:44:38.499041 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 03 08:44:38 crc kubenswrapper[4576]: I1203 08:44:38.518329 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 03 08:44:38 crc kubenswrapper[4576]: I1203 08:44:38.592933 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 03 08:44:38 crc kubenswrapper[4576]: I1203 08:44:38.645196 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 03 08:44:38 crc kubenswrapper[4576]: I1203 08:44:38.666947 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 03 08:44:38 crc kubenswrapper[4576]: I1203 08:44:38.724789 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 03 08:44:38 crc kubenswrapper[4576]: I1203 08:44:38.732933 4576 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-dns"/"dns-default-metrics-tls" Dec 03 08:44:38 crc kubenswrapper[4576]: I1203 08:44:38.748372 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 03 08:44:38 crc kubenswrapper[4576]: I1203 08:44:38.834164 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 03 08:44:38 crc kubenswrapper[4576]: I1203 08:44:38.898886 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 03 08:44:38 crc kubenswrapper[4576]: I1203 08:44:38.914083 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 03 08:44:38 crc kubenswrapper[4576]: I1203 08:44:38.967845 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 03 08:44:38 crc kubenswrapper[4576]: I1203 08:44:38.973730 4576 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 03 08:44:39 crc kubenswrapper[4576]: I1203 08:44:39.046970 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 03 08:44:39 crc kubenswrapper[4576]: I1203 08:44:39.048355 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 03 08:44:39 crc kubenswrapper[4576]: I1203 08:44:39.158801 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 03 08:44:39 crc kubenswrapper[4576]: I1203 08:44:39.232330 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 03 08:44:39 crc kubenswrapper[4576]: I1203 08:44:39.293336 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 03 08:44:39 crc kubenswrapper[4576]: I1203 08:44:39.296954 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 03 08:44:39 crc kubenswrapper[4576]: I1203 08:44:39.362222 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 03 08:44:39 crc kubenswrapper[4576]: I1203 08:44:39.372219 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 03 08:44:39 crc kubenswrapper[4576]: I1203 08:44:39.407391 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 03 08:44:39 crc kubenswrapper[4576]: I1203 08:44:39.433074 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 03 08:44:39 crc kubenswrapper[4576]: I1203 08:44:39.477681 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 03 08:44:39 crc kubenswrapper[4576]: I1203 08:44:39.500285 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 03 08:44:39 crc kubenswrapper[4576]: I1203 08:44:39.514257 4576 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-oauth-apiserver"/"serving-cert" Dec 03 08:44:39 crc kubenswrapper[4576]: I1203 08:44:39.841137 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 03 08:44:39 crc kubenswrapper[4576]: I1203 08:44:39.854706 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 03 08:44:39 crc kubenswrapper[4576]: I1203 08:44:39.960085 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 03 08:44:39 crc kubenswrapper[4576]: I1203 08:44:39.977101 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 03 08:44:40 crc kubenswrapper[4576]: I1203 08:44:40.006206 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 03 08:44:40 crc kubenswrapper[4576]: I1203 08:44:40.018943 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 03 08:44:40 crc kubenswrapper[4576]: I1203 08:44:40.027237 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 03 08:44:40 crc kubenswrapper[4576]: I1203 08:44:40.033450 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 03 08:44:40 crc kubenswrapper[4576]: I1203 08:44:40.067587 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 03 08:44:40 crc kubenswrapper[4576]: I1203 08:44:40.116116 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 03 08:44:40 crc kubenswrapper[4576]: I1203 08:44:40.201027 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 03 08:44:40 crc kubenswrapper[4576]: I1203 08:44:40.229265 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 03 08:44:40 crc kubenswrapper[4576]: I1203 08:44:40.281724 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 03 08:44:40 crc kubenswrapper[4576]: I1203 08:44:40.338610 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 03 08:44:40 crc kubenswrapper[4576]: I1203 08:44:40.397032 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 03 08:44:40 crc kubenswrapper[4576]: I1203 08:44:40.402733 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 03 08:44:40 crc kubenswrapper[4576]: I1203 08:44:40.407860 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 03 08:44:40 crc kubenswrapper[4576]: I1203 08:44:40.489916 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 03 08:44:40 crc kubenswrapper[4576]: I1203 08:44:40.532341 4576 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-apiserver"/"encryption-config-1" Dec 03 08:44:40 crc kubenswrapper[4576]: I1203 08:44:40.565592 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 03 08:44:40 crc kubenswrapper[4576]: I1203 08:44:40.660630 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 03 08:44:40 crc kubenswrapper[4576]: I1203 08:44:40.667000 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 03 08:44:40 crc kubenswrapper[4576]: I1203 08:44:40.677097 4576 scope.go:117] "RemoveContainer" containerID="d7847b653a39977814bd2a743b122f47af0a3dca0400ce4b310e199b1a355e33" Dec 03 08:44:40 crc kubenswrapper[4576]: I1203 08:44:40.725073 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 03 08:44:40 crc kubenswrapper[4576]: I1203 08:44:40.798195 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 03 08:44:40 crc kubenswrapper[4576]: I1203 08:44:40.800118 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 03 08:44:40 crc kubenswrapper[4576]: I1203 08:44:40.840866 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 03 08:44:40 crc kubenswrapper[4576]: I1203 08:44:40.958339 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 03 08:44:40 crc kubenswrapper[4576]: I1203 08:44:40.959364 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 03 08:44:41 crc kubenswrapper[4576]: I1203 08:44:41.033940 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 03 08:44:41 crc kubenswrapper[4576]: I1203 08:44:41.115635 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 03 08:44:41 crc kubenswrapper[4576]: I1203 08:44:41.131995 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 03 08:44:41 crc kubenswrapper[4576]: I1203 08:44:41.160519 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 03 08:44:41 crc kubenswrapper[4576]: I1203 08:44:41.218505 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 03 08:44:41 crc kubenswrapper[4576]: I1203 08:44:41.223766 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-network-diagnostics_network-check-source-55646444c4-trplf_9d751cbb-f2e2-430d-9754-c882a5e924a5/check-endpoints/1.log" Dec 03 08:44:41 crc kubenswrapper[4576]: I1203 08:44:41.223845 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"8fd24b727cb9f82e48b40ef53208361f798435963ba6dce6986e75385aa83bbd"} Dec 03 08:44:41 crc kubenswrapper[4576]: I1203 08:44:41.233451 4576 reflector.go:368] Caches 
populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 03 08:44:41 crc kubenswrapper[4576]: I1203 08:44:41.240120 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 03 08:44:41 crc kubenswrapper[4576]: I1203 08:44:41.285226 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 03 08:44:41 crc kubenswrapper[4576]: I1203 08:44:41.467228 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 03 08:44:41 crc kubenswrapper[4576]: I1203 08:44:41.602346 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 03 08:44:41 crc kubenswrapper[4576]: I1203 08:44:41.618918 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 03 08:44:41 crc kubenswrapper[4576]: I1203 08:44:41.722230 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 03 08:44:41 crc kubenswrapper[4576]: I1203 08:44:41.783981 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 03 08:44:41 crc kubenswrapper[4576]: I1203 08:44:41.857342 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 03 08:44:41 crc kubenswrapper[4576]: I1203 08:44:41.862477 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 03 08:44:41 crc kubenswrapper[4576]: I1203 08:44:41.963797 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 03 08:44:41 crc kubenswrapper[4576]: I1203 08:44:41.968095 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 03 08:44:41 crc kubenswrapper[4576]: I1203 08:44:41.972721 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 03 08:44:42 crc kubenswrapper[4576]: I1203 08:44:42.010714 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 03 08:44:42 crc kubenswrapper[4576]: I1203 08:44:42.030776 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 03 08:44:42 crc kubenswrapper[4576]: I1203 08:44:42.035273 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 03 08:44:42 crc kubenswrapper[4576]: I1203 08:44:42.081072 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 03 08:44:42 crc kubenswrapper[4576]: I1203 08:44:42.144338 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 03 08:44:42 crc kubenswrapper[4576]: I1203 08:44:42.159601 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 03 08:44:42 crc kubenswrapper[4576]: I1203 
08:44:42.231891 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-network-diagnostics_network-check-source-55646444c4-trplf_9d751cbb-f2e2-430d-9754-c882a5e924a5/check-endpoints/2.log" Dec 03 08:44:42 crc kubenswrapper[4576]: I1203 08:44:42.232355 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-network-diagnostics_network-check-source-55646444c4-trplf_9d751cbb-f2e2-430d-9754-c882a5e924a5/check-endpoints/1.log" Dec 03 08:44:42 crc kubenswrapper[4576]: I1203 08:44:42.232405 4576 generic.go:334] "Generic (PLEG): container finished" podID="9d751cbb-f2e2-430d-9754-c882a5e924a5" containerID="8fd24b727cb9f82e48b40ef53208361f798435963ba6dce6986e75385aa83bbd" exitCode=255 Dec 03 08:44:42 crc kubenswrapper[4576]: I1203 08:44:42.232447 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerDied","Data":"8fd24b727cb9f82e48b40ef53208361f798435963ba6dce6986e75385aa83bbd"} Dec 03 08:44:42 crc kubenswrapper[4576]: I1203 08:44:42.232478 4576 scope.go:117] "RemoveContainer" containerID="d7847b653a39977814bd2a743b122f47af0a3dca0400ce4b310e199b1a355e33" Dec 03 08:44:42 crc kubenswrapper[4576]: I1203 08:44:42.232940 4576 scope.go:117] "RemoveContainer" containerID="8fd24b727cb9f82e48b40ef53208361f798435963ba6dce6986e75385aa83bbd" Dec 03 08:44:42 crc kubenswrapper[4576]: E1203 08:44:42.233157 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"check-endpoints\" with CrashLoopBackOff: \"back-off 20s restarting failed container=check-endpoints pod=network-check-source-55646444c4-trplf_openshift-network-diagnostics(9d751cbb-f2e2-430d-9754-c882a5e924a5)\"" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:44:42 crc kubenswrapper[4576]: I1203 08:44:42.247370 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 03 08:44:42 crc kubenswrapper[4576]: I1203 08:44:42.305808 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 03 08:44:42 crc kubenswrapper[4576]: I1203 08:44:42.321232 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 03 08:44:42 crc kubenswrapper[4576]: I1203 08:44:42.360085 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 03 08:44:42 crc kubenswrapper[4576]: I1203 08:44:42.374514 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 03 08:44:42 crc kubenswrapper[4576]: I1203 08:44:42.467206 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 03 08:44:42 crc kubenswrapper[4576]: I1203 08:44:42.473469 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 03 08:44:42 crc kubenswrapper[4576]: I1203 08:44:42.525163 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 03 08:44:42 crc kubenswrapper[4576]: I1203 08:44:42.547664 4576 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 03 08:44:42 crc kubenswrapper[4576]: I1203 08:44:42.550772 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 03 08:44:42 crc kubenswrapper[4576]: I1203 08:44:42.620497 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 03 08:44:42 crc kubenswrapper[4576]: I1203 08:44:42.907413 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 03 08:44:42 crc kubenswrapper[4576]: I1203 08:44:42.913670 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 03 08:44:42 crc kubenswrapper[4576]: I1203 08:44:42.937589 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 03 08:44:42 crc kubenswrapper[4576]: I1203 08:44:42.950073 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 03 08:44:43 crc kubenswrapper[4576]: I1203 08:44:43.011633 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 03 08:44:43 crc kubenswrapper[4576]: I1203 08:44:43.020608 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 03 08:44:43 crc kubenswrapper[4576]: I1203 08:44:43.247968 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-network-diagnostics_network-check-source-55646444c4-trplf_9d751cbb-f2e2-430d-9754-c882a5e924a5/check-endpoints/2.log" Dec 03 08:44:43 crc kubenswrapper[4576]: I1203 08:44:43.397236 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 03 08:44:43 crc kubenswrapper[4576]: I1203 08:44:43.422953 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 03 08:44:43 crc kubenswrapper[4576]: I1203 08:44:43.476460 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 03 08:44:43 crc kubenswrapper[4576]: I1203 08:44:43.484954 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 03 08:44:43 crc kubenswrapper[4576]: I1203 08:44:43.504965 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 03 08:44:43 crc kubenswrapper[4576]: I1203 08:44:43.525495 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 03 08:44:43 crc kubenswrapper[4576]: I1203 08:44:43.528596 4576 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 03 08:44:43 crc kubenswrapper[4576]: I1203 08:44:43.596902 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 03 08:44:43 crc kubenswrapper[4576]: I1203 08:44:43.620426 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 03 08:44:43 crc 
kubenswrapper[4576]: I1203 08:44:43.838116 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 03 08:44:43 crc kubenswrapper[4576]: I1203 08:44:43.897719 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 03 08:44:43 crc kubenswrapper[4576]: I1203 08:44:43.937025 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 03 08:44:44 crc kubenswrapper[4576]: I1203 08:44:44.025468 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 03 08:44:44 crc kubenswrapper[4576]: I1203 08:44:44.077447 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 03 08:44:44 crc kubenswrapper[4576]: I1203 08:44:44.101037 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 03 08:44:44 crc kubenswrapper[4576]: I1203 08:44:44.165533 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 03 08:44:44 crc kubenswrapper[4576]: I1203 08:44:44.165721 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 03 08:44:44 crc kubenswrapper[4576]: I1203 08:44:44.227449 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 03 08:44:44 crc kubenswrapper[4576]: I1203 08:44:44.384312 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 03 08:44:44 crc kubenswrapper[4576]: I1203 08:44:44.405604 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 03 08:44:44 crc kubenswrapper[4576]: I1203 08:44:44.430003 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 03 08:44:44 crc kubenswrapper[4576]: I1203 08:44:44.470020 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 03 08:44:44 crc kubenswrapper[4576]: I1203 08:44:44.573501 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 03 08:44:44 crc kubenswrapper[4576]: I1203 08:44:44.585572 4576 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 03 08:44:44 crc kubenswrapper[4576]: I1203 08:44:44.586018 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://0ef7bd27e110af398acda212a62fc55c165031b2586405f6bb869e451370fec6" gracePeriod=5 Dec 03 08:44:44 crc kubenswrapper[4576]: I1203 08:44:44.651458 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 03 08:44:44 crc kubenswrapper[4576]: I1203 08:44:44.693926 4576 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 03 08:44:44 crc kubenswrapper[4576]: I1203 08:44:44.817454 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 03 08:44:44 crc kubenswrapper[4576]: I1203 08:44:44.827316 4576 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 03 08:44:44 crc kubenswrapper[4576]: I1203 08:44:44.843209 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 03 08:44:44 crc kubenswrapper[4576]: I1203 08:44:44.901324 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 03 08:44:44 crc kubenswrapper[4576]: I1203 08:44:44.902673 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 03 08:44:44 crc kubenswrapper[4576]: I1203 08:44:44.916268 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 03 08:44:44 crc kubenswrapper[4576]: I1203 08:44:44.922480 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 03 08:44:45 crc kubenswrapper[4576]: I1203 08:44:45.032165 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 03 08:44:45 crc kubenswrapper[4576]: I1203 08:44:45.042821 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 03 08:44:45 crc kubenswrapper[4576]: I1203 08:44:45.058908 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 03 08:44:45 crc kubenswrapper[4576]: I1203 08:44:45.075409 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 03 08:44:45 crc kubenswrapper[4576]: I1203 08:44:45.140957 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 03 08:44:45 crc kubenswrapper[4576]: I1203 08:44:45.218256 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 03 08:44:45 crc kubenswrapper[4576]: I1203 08:44:45.218550 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 03 08:44:45 crc kubenswrapper[4576]: I1203 08:44:45.255643 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 03 08:44:45 crc kubenswrapper[4576]: I1203 08:44:45.344915 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 03 08:44:45 crc kubenswrapper[4576]: I1203 08:44:45.365393 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 03 08:44:45 crc kubenswrapper[4576]: I1203 08:44:45.403729 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 03 08:44:45 crc kubenswrapper[4576]: I1203 08:44:45.436849 4576 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 03 08:44:45 crc kubenswrapper[4576]: I1203 08:44:45.443282 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 03 08:44:45 crc kubenswrapper[4576]: I1203 08:44:45.474876 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 03 08:44:45 crc kubenswrapper[4576]: I1203 08:44:45.497770 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 03 08:44:45 crc kubenswrapper[4576]: I1203 08:44:45.617231 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 03 08:44:45 crc kubenswrapper[4576]: I1203 08:44:45.676342 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 03 08:44:45 crc kubenswrapper[4576]: I1203 08:44:45.853171 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 03 08:44:45 crc kubenswrapper[4576]: I1203 08:44:45.853190 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 03 08:44:45 crc kubenswrapper[4576]: I1203 08:44:45.876758 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 03 08:44:45 crc kubenswrapper[4576]: I1203 08:44:45.900112 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 03 08:44:45 crc kubenswrapper[4576]: I1203 08:44:45.903038 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 03 08:44:46 crc kubenswrapper[4576]: I1203 08:44:46.044813 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 03 08:44:46 crc kubenswrapper[4576]: I1203 08:44:46.107160 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 03 08:44:46 crc kubenswrapper[4576]: I1203 08:44:46.115989 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 03 08:44:46 crc kubenswrapper[4576]: I1203 08:44:46.130123 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 03 08:44:46 crc kubenswrapper[4576]: I1203 08:44:46.133584 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 03 08:44:46 crc kubenswrapper[4576]: I1203 08:44:46.206636 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 03 08:44:46 crc kubenswrapper[4576]: I1203 08:44:46.219467 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 03 08:44:46 crc kubenswrapper[4576]: I1203 08:44:46.271304 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 03 08:44:46 crc kubenswrapper[4576]: I1203 08:44:46.276743 4576 reflector.go:368] Caches populated for *v1.ConfigMap 
from object-"openshift-controller-manager"/"openshift-global-ca" Dec 03 08:44:46 crc kubenswrapper[4576]: I1203 08:44:46.380898 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 03 08:44:46 crc kubenswrapper[4576]: I1203 08:44:46.412253 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 03 08:44:46 crc kubenswrapper[4576]: I1203 08:44:46.566514 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 03 08:44:46 crc kubenswrapper[4576]: I1203 08:44:46.571653 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 03 08:44:46 crc kubenswrapper[4576]: I1203 08:44:46.606175 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 03 08:44:46 crc kubenswrapper[4576]: I1203 08:44:46.707061 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 03 08:44:46 crc kubenswrapper[4576]: I1203 08:44:46.723163 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 03 08:44:46 crc kubenswrapper[4576]: I1203 08:44:46.924484 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 03 08:44:47 crc kubenswrapper[4576]: I1203 08:44:47.107994 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 03 08:44:47 crc kubenswrapper[4576]: I1203 08:44:47.113155 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 03 08:44:47 crc kubenswrapper[4576]: I1203 08:44:47.153930 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 03 08:44:47 crc kubenswrapper[4576]: I1203 08:44:47.182694 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 03 08:44:47 crc kubenswrapper[4576]: I1203 08:44:47.321992 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 03 08:44:47 crc kubenswrapper[4576]: I1203 08:44:47.334633 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 03 08:44:47 crc kubenswrapper[4576]: I1203 08:44:47.626961 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 03 08:44:47 crc kubenswrapper[4576]: I1203 08:44:47.652575 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 03 08:44:47 crc kubenswrapper[4576]: I1203 08:44:47.680468 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 03 08:44:47 crc kubenswrapper[4576]: I1203 08:44:47.899282 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 03 08:44:47 crc kubenswrapper[4576]: I1203 08:44:47.970580 4576 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-dns"/"kube-root-ca.crt" Dec 03 08:44:48 crc kubenswrapper[4576]: I1203 08:44:48.532595 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 03 08:44:48 crc kubenswrapper[4576]: I1203 08:44:48.821462 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 03 08:44:48 crc kubenswrapper[4576]: I1203 08:44:48.848441 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 03 08:44:49 crc kubenswrapper[4576]: I1203 08:44:49.084408 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 03 08:44:49 crc kubenswrapper[4576]: I1203 08:44:49.385649 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 03 08:44:49 crc kubenswrapper[4576]: I1203 08:44:49.434036 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 03 08:44:49 crc kubenswrapper[4576]: I1203 08:44:49.581789 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 03 08:44:49 crc kubenswrapper[4576]: I1203 08:44:49.696558 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 03 08:44:49 crc kubenswrapper[4576]: I1203 08:44:49.696703 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 08:44:49 crc kubenswrapper[4576]: I1203 08:44:49.840166 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 03 08:44:49 crc kubenswrapper[4576]: I1203 08:44:49.840243 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 03 08:44:49 crc kubenswrapper[4576]: I1203 08:44:49.840314 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 03 08:44:49 crc kubenswrapper[4576]: I1203 08:44:49.840382 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 03 08:44:49 crc kubenswrapper[4576]: I1203 08:44:49.840404 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 08:44:49 crc kubenswrapper[4576]: I1203 08:44:49.840454 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 08:44:49 crc kubenswrapper[4576]: I1203 08:44:49.840469 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 03 08:44:49 crc kubenswrapper[4576]: I1203 08:44:49.840493 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 08:44:49 crc kubenswrapper[4576]: I1203 08:44:49.840563 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 08:44:49 crc kubenswrapper[4576]: I1203 08:44:49.841075 4576 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Dec 03 08:44:49 crc kubenswrapper[4576]: I1203 08:44:49.841629 4576 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 03 08:44:49 crc kubenswrapper[4576]: I1203 08:44:49.841645 4576 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Dec 03 08:44:49 crc kubenswrapper[4576]: I1203 08:44:49.841653 4576 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 03 08:44:49 crc kubenswrapper[4576]: I1203 08:44:49.849880 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 08:44:49 crc kubenswrapper[4576]: I1203 08:44:49.885116 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 03 08:44:49 crc kubenswrapper[4576]: I1203 08:44:49.942404 4576 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 03 08:44:50 crc kubenswrapper[4576]: I1203 08:44:50.286739 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 03 08:44:50 crc kubenswrapper[4576]: I1203 08:44:50.287024 4576 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="0ef7bd27e110af398acda212a62fc55c165031b2586405f6bb869e451370fec6" exitCode=137 Dec 03 08:44:50 crc kubenswrapper[4576]: I1203 08:44:50.287146 4576 scope.go:117] "RemoveContainer" containerID="0ef7bd27e110af398acda212a62fc55c165031b2586405f6bb869e451370fec6" Dec 03 08:44:50 crc kubenswrapper[4576]: I1203 08:44:50.287338 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 08:44:50 crc kubenswrapper[4576]: I1203 08:44:50.313092 4576 scope.go:117] "RemoveContainer" containerID="0ef7bd27e110af398acda212a62fc55c165031b2586405f6bb869e451370fec6" Dec 03 08:44:50 crc kubenswrapper[4576]: E1203 08:44:50.313734 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0ef7bd27e110af398acda212a62fc55c165031b2586405f6bb869e451370fec6\": container with ID starting with 0ef7bd27e110af398acda212a62fc55c165031b2586405f6bb869e451370fec6 not found: ID does not exist" containerID="0ef7bd27e110af398acda212a62fc55c165031b2586405f6bb869e451370fec6" Dec 03 08:44:50 crc kubenswrapper[4576]: I1203 08:44:50.313888 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ef7bd27e110af398acda212a62fc55c165031b2586405f6bb869e451370fec6"} err="failed to get container status \"0ef7bd27e110af398acda212a62fc55c165031b2586405f6bb869e451370fec6\": rpc error: code = NotFound desc = could not find container \"0ef7bd27e110af398acda212a62fc55c165031b2586405f6bb869e451370fec6\": container with ID starting with 0ef7bd27e110af398acda212a62fc55c165031b2586405f6bb869e451370fec6 not found: ID does not exist" Dec 03 08:44:50 crc kubenswrapper[4576]: I1203 08:44:50.961504 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 03 08:44:51 crc kubenswrapper[4576]: I1203 08:44:51.409747 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 03 08:44:51 crc kubenswrapper[4576]: I1203 08:44:51.526729 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 03 08:44:51 crc kubenswrapper[4576]: I1203 08:44:51.690751 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Dec 03 08:44:51 crc kubenswrapper[4576]: I1203 08:44:51.691251 4576 mirror_client.go:130] "Deleting a mirror pod" 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="" Dec 03 08:44:51 crc kubenswrapper[4576]: I1203 08:44:51.703893 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 03 08:44:51 crc kubenswrapper[4576]: I1203 08:44:51.703956 4576 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="bb17bf76-5378-4e4d-8667-5d687c334232" Dec 03 08:44:51 crc kubenswrapper[4576]: I1203 08:44:51.708612 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 03 08:44:51 crc kubenswrapper[4576]: I1203 08:44:51.708671 4576 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="bb17bf76-5378-4e4d-8667-5d687c334232" Dec 03 08:44:53 crc kubenswrapper[4576]: I1203 08:44:53.192964 4576 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Dec 03 08:44:53 crc kubenswrapper[4576]: I1203 08:44:53.681961 4576 scope.go:117] "RemoveContainer" containerID="8fd24b727cb9f82e48b40ef53208361f798435963ba6dce6986e75385aa83bbd" Dec 03 08:44:53 crc kubenswrapper[4576]: E1203 08:44:53.682363 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"check-endpoints\" with CrashLoopBackOff: \"back-off 20s restarting failed container=check-endpoints pod=network-check-source-55646444c4-trplf_openshift-network-diagnostics(9d751cbb-f2e2-430d-9754-c882a5e924a5)\"" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 08:44:55 crc kubenswrapper[4576]: I1203 08:44:55.034160 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 08:45:00 crc kubenswrapper[4576]: I1203 08:45:00.165314 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412525-8cqsz"] Dec 03 08:45:00 crc kubenswrapper[4576]: E1203 08:45:00.165970 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 03 08:45:00 crc kubenswrapper[4576]: I1203 08:45:00.165986 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 03 08:45:00 crc kubenswrapper[4576]: E1203 08:45:00.166044 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8966b99c-3a63-452f-9150-dd0548ecd89a" containerName="installer" Dec 03 08:45:00 crc kubenswrapper[4576]: I1203 08:45:00.166051 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="8966b99c-3a63-452f-9150-dd0548ecd89a" containerName="installer" Dec 03 08:45:00 crc kubenswrapper[4576]: I1203 08:45:00.166156 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 03 08:45:00 crc kubenswrapper[4576]: I1203 08:45:00.166174 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="8966b99c-3a63-452f-9150-dd0548ecd89a" containerName="installer" Dec 03 08:45:00 crc kubenswrapper[4576]: I1203 08:45:00.168055 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412525-8cqsz" Dec 03 08:45:00 crc kubenswrapper[4576]: I1203 08:45:00.170978 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 03 08:45:00 crc kubenswrapper[4576]: I1203 08:45:00.171183 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 03 08:45:00 crc kubenswrapper[4576]: I1203 08:45:00.173083 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412525-8cqsz"] Dec 03 08:45:00 crc kubenswrapper[4576]: I1203 08:45:00.285508 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qcwsd\" (UniqueName: \"kubernetes.io/projected/8455c8a5-3cf3-44b7-98dd-9d6564fb6994-kube-api-access-qcwsd\") pod \"collect-profiles-29412525-8cqsz\" (UID: \"8455c8a5-3cf3-44b7-98dd-9d6564fb6994\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412525-8cqsz" Dec 03 08:45:00 crc kubenswrapper[4576]: I1203 08:45:00.285609 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8455c8a5-3cf3-44b7-98dd-9d6564fb6994-secret-volume\") pod \"collect-profiles-29412525-8cqsz\" (UID: \"8455c8a5-3cf3-44b7-98dd-9d6564fb6994\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412525-8cqsz" Dec 03 08:45:00 crc kubenswrapper[4576]: I1203 08:45:00.285637 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8455c8a5-3cf3-44b7-98dd-9d6564fb6994-config-volume\") pod \"collect-profiles-29412525-8cqsz\" (UID: \"8455c8a5-3cf3-44b7-98dd-9d6564fb6994\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412525-8cqsz" Dec 03 08:45:00 crc kubenswrapper[4576]: I1203 08:45:00.387888 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qcwsd\" (UniqueName: \"kubernetes.io/projected/8455c8a5-3cf3-44b7-98dd-9d6564fb6994-kube-api-access-qcwsd\") pod \"collect-profiles-29412525-8cqsz\" (UID: \"8455c8a5-3cf3-44b7-98dd-9d6564fb6994\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412525-8cqsz" Dec 03 08:45:00 crc kubenswrapper[4576]: I1203 08:45:00.387988 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8455c8a5-3cf3-44b7-98dd-9d6564fb6994-secret-volume\") pod \"collect-profiles-29412525-8cqsz\" (UID: \"8455c8a5-3cf3-44b7-98dd-9d6564fb6994\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412525-8cqsz" Dec 03 08:45:00 crc kubenswrapper[4576]: I1203 08:45:00.388034 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8455c8a5-3cf3-44b7-98dd-9d6564fb6994-config-volume\") pod \"collect-profiles-29412525-8cqsz\" (UID: \"8455c8a5-3cf3-44b7-98dd-9d6564fb6994\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412525-8cqsz" Dec 03 08:45:00 crc kubenswrapper[4576]: I1203 08:45:00.389064 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8455c8a5-3cf3-44b7-98dd-9d6564fb6994-config-volume\") pod 
\"collect-profiles-29412525-8cqsz\" (UID: \"8455c8a5-3cf3-44b7-98dd-9d6564fb6994\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412525-8cqsz" Dec 03 08:45:00 crc kubenswrapper[4576]: I1203 08:45:00.400899 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8455c8a5-3cf3-44b7-98dd-9d6564fb6994-secret-volume\") pod \"collect-profiles-29412525-8cqsz\" (UID: \"8455c8a5-3cf3-44b7-98dd-9d6564fb6994\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412525-8cqsz" Dec 03 08:45:00 crc kubenswrapper[4576]: I1203 08:45:00.412559 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qcwsd\" (UniqueName: \"kubernetes.io/projected/8455c8a5-3cf3-44b7-98dd-9d6564fb6994-kube-api-access-qcwsd\") pod \"collect-profiles-29412525-8cqsz\" (UID: \"8455c8a5-3cf3-44b7-98dd-9d6564fb6994\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412525-8cqsz" Dec 03 08:45:00 crc kubenswrapper[4576]: I1203 08:45:00.487355 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412525-8cqsz" Dec 03 08:45:00 crc kubenswrapper[4576]: I1203 08:45:00.705344 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412525-8cqsz"] Dec 03 08:45:01 crc kubenswrapper[4576]: I1203 08:45:01.356031 4576 generic.go:334] "Generic (PLEG): container finished" podID="8455c8a5-3cf3-44b7-98dd-9d6564fb6994" containerID="12a0b594286b95b5a6d84e0fb781b5212bba03fa2940c0fbd328d136d89d2b65" exitCode=0 Dec 03 08:45:01 crc kubenswrapper[4576]: I1203 08:45:01.356086 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412525-8cqsz" event={"ID":"8455c8a5-3cf3-44b7-98dd-9d6564fb6994","Type":"ContainerDied","Data":"12a0b594286b95b5a6d84e0fb781b5212bba03fa2940c0fbd328d136d89d2b65"} Dec 03 08:45:01 crc kubenswrapper[4576]: I1203 08:45:01.356403 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412525-8cqsz" event={"ID":"8455c8a5-3cf3-44b7-98dd-9d6564fb6994","Type":"ContainerStarted","Data":"e89f22936081ce7c9a92dc8dfb9096458d1ae292f50e35a06eb5181a04d65960"} Dec 03 08:45:02 crc kubenswrapper[4576]: I1203 08:45:02.552808 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412525-8cqsz" Dec 03 08:45:02 crc kubenswrapper[4576]: I1203 08:45:02.616275 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8455c8a5-3cf3-44b7-98dd-9d6564fb6994-config-volume\") pod \"8455c8a5-3cf3-44b7-98dd-9d6564fb6994\" (UID: \"8455c8a5-3cf3-44b7-98dd-9d6564fb6994\") " Dec 03 08:45:02 crc kubenswrapper[4576]: I1203 08:45:02.616334 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qcwsd\" (UniqueName: \"kubernetes.io/projected/8455c8a5-3cf3-44b7-98dd-9d6564fb6994-kube-api-access-qcwsd\") pod \"8455c8a5-3cf3-44b7-98dd-9d6564fb6994\" (UID: \"8455c8a5-3cf3-44b7-98dd-9d6564fb6994\") " Dec 03 08:45:02 crc kubenswrapper[4576]: I1203 08:45:02.616362 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8455c8a5-3cf3-44b7-98dd-9d6564fb6994-secret-volume\") pod \"8455c8a5-3cf3-44b7-98dd-9d6564fb6994\" (UID: \"8455c8a5-3cf3-44b7-98dd-9d6564fb6994\") " Dec 03 08:45:02 crc kubenswrapper[4576]: I1203 08:45:02.619248 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8455c8a5-3cf3-44b7-98dd-9d6564fb6994-config-volume" (OuterVolumeSpecName: "config-volume") pod "8455c8a5-3cf3-44b7-98dd-9d6564fb6994" (UID: "8455c8a5-3cf3-44b7-98dd-9d6564fb6994"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:45:02 crc kubenswrapper[4576]: I1203 08:45:02.623080 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8455c8a5-3cf3-44b7-98dd-9d6564fb6994-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "8455c8a5-3cf3-44b7-98dd-9d6564fb6994" (UID: "8455c8a5-3cf3-44b7-98dd-9d6564fb6994"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:45:02 crc kubenswrapper[4576]: I1203 08:45:02.630727 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8455c8a5-3cf3-44b7-98dd-9d6564fb6994-kube-api-access-qcwsd" (OuterVolumeSpecName: "kube-api-access-qcwsd") pod "8455c8a5-3cf3-44b7-98dd-9d6564fb6994" (UID: "8455c8a5-3cf3-44b7-98dd-9d6564fb6994"). InnerVolumeSpecName "kube-api-access-qcwsd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:45:02 crc kubenswrapper[4576]: I1203 08:45:02.717395 4576 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8455c8a5-3cf3-44b7-98dd-9d6564fb6994-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 08:45:02 crc kubenswrapper[4576]: I1203 08:45:02.717437 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qcwsd\" (UniqueName: \"kubernetes.io/projected/8455c8a5-3cf3-44b7-98dd-9d6564fb6994-kube-api-access-qcwsd\") on node \"crc\" DevicePath \"\"" Dec 03 08:45:02 crc kubenswrapper[4576]: I1203 08:45:02.717543 4576 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8455c8a5-3cf3-44b7-98dd-9d6564fb6994-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 08:45:03 crc kubenswrapper[4576]: I1203 08:45:03.368773 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412525-8cqsz" event={"ID":"8455c8a5-3cf3-44b7-98dd-9d6564fb6994","Type":"ContainerDied","Data":"e89f22936081ce7c9a92dc8dfb9096458d1ae292f50e35a06eb5181a04d65960"} Dec 03 08:45:03 crc kubenswrapper[4576]: I1203 08:45:03.368816 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e89f22936081ce7c9a92dc8dfb9096458d1ae292f50e35a06eb5181a04d65960" Dec 03 08:45:03 crc kubenswrapper[4576]: I1203 08:45:03.368897 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412525-8cqsz" Dec 03 08:45:06 crc kubenswrapper[4576]: I1203 08:45:06.387044 4576 generic.go:334] "Generic (PLEG): container finished" podID="2541618c-f550-48ef-9316-77a5dd7f1084" containerID="4b1273cfc28de5af495b51f60a1cc5facbdcb34b68b97741f59f227322037917" exitCode=0 Dec 03 08:45:06 crc kubenswrapper[4576]: I1203 08:45:06.387488 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mrptv" event={"ID":"2541618c-f550-48ef-9316-77a5dd7f1084","Type":"ContainerDied","Data":"4b1273cfc28de5af495b51f60a1cc5facbdcb34b68b97741f59f227322037917"} Dec 03 08:45:06 crc kubenswrapper[4576]: I1203 08:45:06.388275 4576 scope.go:117] "RemoveContainer" containerID="4b1273cfc28de5af495b51f60a1cc5facbdcb34b68b97741f59f227322037917" Dec 03 08:45:06 crc kubenswrapper[4576]: I1203 08:45:06.678145 4576 scope.go:117] "RemoveContainer" containerID="8fd24b727cb9f82e48b40ef53208361f798435963ba6dce6986e75385aa83bbd" Dec 03 08:45:07 crc kubenswrapper[4576]: I1203 08:45:07.402328 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mrptv" event={"ID":"2541618c-f550-48ef-9316-77a5dd7f1084","Type":"ContainerStarted","Data":"351871c68a336dcd75ae9cbe6efcaf9655728aeeef7d2a90d7a367479728e674"} Dec 03 08:45:07 crc kubenswrapper[4576]: I1203 08:45:07.403486 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-mrptv" Dec 03 08:45:07 crc kubenswrapper[4576]: I1203 08:45:07.405581 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-network-diagnostics_network-check-source-55646444c4-trplf_9d751cbb-f2e2-430d-9754-c882a5e924a5/check-endpoints/2.log" Dec 03 08:45:07 crc kubenswrapper[4576]: I1203 08:45:07.405637 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"a9f3dca2f4b3a8b994efe33f99429e3fb162969e554f21d8221fc14e86145157"} Dec 03 08:45:07 crc kubenswrapper[4576]: I1203 08:45:07.406896 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-mrptv" Dec 03 08:45:15 crc kubenswrapper[4576]: I1203 08:45:15.455991 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-km29k"] Dec 03 08:45:15 crc kubenswrapper[4576]: I1203 08:45:15.456800 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-km29k" podUID="cdcaf314-ef4a-4f8e-90fb-a597bcd1f343" containerName="controller-manager" containerID="cri-o://fecfd15b802de61a1a9b30fa31bc6bb4786f1286db2343e8da28e32fef4bb18c" gracePeriod=30 Dec 03 08:45:15 crc kubenswrapper[4576]: I1203 08:45:15.498623 4576 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-km29k container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/healthz\": read tcp 10.217.0.2:57130->10.217.0.9:8443: read: connection reset by peer" start-of-body= Dec 03 08:45:15 crc kubenswrapper[4576]: I1203 08:45:15.498816 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-km29k" podUID="cdcaf314-ef4a-4f8e-90fb-a597bcd1f343" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.9:8443/healthz\": read tcp 10.217.0.2:57130->10.217.0.9:8443: read: connection reset by peer" Dec 03 08:45:15 crc kubenswrapper[4576]: I1203 08:45:15.545077 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-gnxs8"] Dec 03 08:45:15 crc kubenswrapper[4576]: I1203 08:45:15.545343 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gnxs8" podUID="c82788b8-bd1f-49c5-8f63-e8e360008f40" containerName="route-controller-manager" containerID="cri-o://0b3dfe1ebcfde9f6d22758a71e03f9a0350d0ec55e47074169890faf0e427cb4" gracePeriod=30 Dec 03 08:45:15 crc kubenswrapper[4576]: I1203 08:45:15.735962 4576 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-gnxs8 container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" start-of-body= Dec 03 08:45:15 crc kubenswrapper[4576]: I1203 08:45:15.736032 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gnxs8" podUID="c82788b8-bd1f-49c5-8f63-e8e360008f40" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" Dec 03 08:45:15 crc kubenswrapper[4576]: I1203 08:45:15.829990 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-km29k" Dec 03 08:45:15 crc kubenswrapper[4576]: I1203 08:45:15.882255 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gnxs8" Dec 03 08:45:15 crc kubenswrapper[4576]: I1203 08:45:15.991848 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c82788b8-bd1f-49c5-8f63-e8e360008f40-serving-cert\") pod \"c82788b8-bd1f-49c5-8f63-e8e360008f40\" (UID: \"c82788b8-bd1f-49c5-8f63-e8e360008f40\") " Dec 03 08:45:15 crc kubenswrapper[4576]: I1203 08:45:15.991914 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cdcaf314-ef4a-4f8e-90fb-a597bcd1f343-serving-cert\") pod \"cdcaf314-ef4a-4f8e-90fb-a597bcd1f343\" (UID: \"cdcaf314-ef4a-4f8e-90fb-a597bcd1f343\") " Dec 03 08:45:15 crc kubenswrapper[4576]: I1203 08:45:15.991935 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cdcaf314-ef4a-4f8e-90fb-a597bcd1f343-proxy-ca-bundles\") pod \"cdcaf314-ef4a-4f8e-90fb-a597bcd1f343\" (UID: \"cdcaf314-ef4a-4f8e-90fb-a597bcd1f343\") " Dec 03 08:45:15 crc kubenswrapper[4576]: I1203 08:45:15.991956 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cdcaf314-ef4a-4f8e-90fb-a597bcd1f343-config\") pod \"cdcaf314-ef4a-4f8e-90fb-a597bcd1f343\" (UID: \"cdcaf314-ef4a-4f8e-90fb-a597bcd1f343\") " Dec 03 08:45:15 crc kubenswrapper[4576]: I1203 08:45:15.992012 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c82788b8-bd1f-49c5-8f63-e8e360008f40-config\") pod \"c82788b8-bd1f-49c5-8f63-e8e360008f40\" (UID: \"c82788b8-bd1f-49c5-8f63-e8e360008f40\") " Dec 03 08:45:15 crc kubenswrapper[4576]: I1203 08:45:15.992060 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cdcaf314-ef4a-4f8e-90fb-a597bcd1f343-client-ca\") pod \"cdcaf314-ef4a-4f8e-90fb-a597bcd1f343\" (UID: \"cdcaf314-ef4a-4f8e-90fb-a597bcd1f343\") " Dec 03 08:45:15 crc kubenswrapper[4576]: I1203 08:45:15.992085 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9dtfm\" (UniqueName: \"kubernetes.io/projected/cdcaf314-ef4a-4f8e-90fb-a597bcd1f343-kube-api-access-9dtfm\") pod \"cdcaf314-ef4a-4f8e-90fb-a597bcd1f343\" (UID: \"cdcaf314-ef4a-4f8e-90fb-a597bcd1f343\") " Dec 03 08:45:15 crc kubenswrapper[4576]: I1203 08:45:15.992112 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sjgq6\" (UniqueName: \"kubernetes.io/projected/c82788b8-bd1f-49c5-8f63-e8e360008f40-kube-api-access-sjgq6\") pod \"c82788b8-bd1f-49c5-8f63-e8e360008f40\" (UID: \"c82788b8-bd1f-49c5-8f63-e8e360008f40\") " Dec 03 08:45:15 crc kubenswrapper[4576]: I1203 08:45:15.992134 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c82788b8-bd1f-49c5-8f63-e8e360008f40-client-ca\") pod \"c82788b8-bd1f-49c5-8f63-e8e360008f40\" (UID: \"c82788b8-bd1f-49c5-8f63-e8e360008f40\") " Dec 03 08:45:15 crc kubenswrapper[4576]: I1203 08:45:15.993329 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c82788b8-bd1f-49c5-8f63-e8e360008f40-config" (OuterVolumeSpecName: "config") pod "c82788b8-bd1f-49c5-8f63-e8e360008f40" (UID: 
"c82788b8-bd1f-49c5-8f63-e8e360008f40"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:45:15 crc kubenswrapper[4576]: I1203 08:45:15.993651 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cdcaf314-ef4a-4f8e-90fb-a597bcd1f343-client-ca" (OuterVolumeSpecName: "client-ca") pod "cdcaf314-ef4a-4f8e-90fb-a597bcd1f343" (UID: "cdcaf314-ef4a-4f8e-90fb-a597bcd1f343"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:45:15 crc kubenswrapper[4576]: I1203 08:45:15.993658 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c82788b8-bd1f-49c5-8f63-e8e360008f40-client-ca" (OuterVolumeSpecName: "client-ca") pod "c82788b8-bd1f-49c5-8f63-e8e360008f40" (UID: "c82788b8-bd1f-49c5-8f63-e8e360008f40"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:45:15 crc kubenswrapper[4576]: I1203 08:45:15.993724 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cdcaf314-ef4a-4f8e-90fb-a597bcd1f343-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "cdcaf314-ef4a-4f8e-90fb-a597bcd1f343" (UID: "cdcaf314-ef4a-4f8e-90fb-a597bcd1f343"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:45:15 crc kubenswrapper[4576]: I1203 08:45:15.994068 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cdcaf314-ef4a-4f8e-90fb-a597bcd1f343-config" (OuterVolumeSpecName: "config") pod "cdcaf314-ef4a-4f8e-90fb-a597bcd1f343" (UID: "cdcaf314-ef4a-4f8e-90fb-a597bcd1f343"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:45:15 crc kubenswrapper[4576]: I1203 08:45:15.998578 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c82788b8-bd1f-49c5-8f63-e8e360008f40-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "c82788b8-bd1f-49c5-8f63-e8e360008f40" (UID: "c82788b8-bd1f-49c5-8f63-e8e360008f40"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:45:15 crc kubenswrapper[4576]: I1203 08:45:15.998657 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cdcaf314-ef4a-4f8e-90fb-a597bcd1f343-kube-api-access-9dtfm" (OuterVolumeSpecName: "kube-api-access-9dtfm") pod "cdcaf314-ef4a-4f8e-90fb-a597bcd1f343" (UID: "cdcaf314-ef4a-4f8e-90fb-a597bcd1f343"). InnerVolumeSpecName "kube-api-access-9dtfm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:45:15 crc kubenswrapper[4576]: I1203 08:45:15.998776 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cdcaf314-ef4a-4f8e-90fb-a597bcd1f343-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "cdcaf314-ef4a-4f8e-90fb-a597bcd1f343" (UID: "cdcaf314-ef4a-4f8e-90fb-a597bcd1f343"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:45:15 crc kubenswrapper[4576]: I1203 08:45:15.998859 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c82788b8-bd1f-49c5-8f63-e8e360008f40-kube-api-access-sjgq6" (OuterVolumeSpecName: "kube-api-access-sjgq6") pod "c82788b8-bd1f-49c5-8f63-e8e360008f40" (UID: "c82788b8-bd1f-49c5-8f63-e8e360008f40"). 
InnerVolumeSpecName "kube-api-access-sjgq6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:45:16 crc kubenswrapper[4576]: I1203 08:45:16.093767 4576 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cdcaf314-ef4a-4f8e-90fb-a597bcd1f343-client-ca\") on node \"crc\" DevicePath \"\"" Dec 03 08:45:16 crc kubenswrapper[4576]: I1203 08:45:16.093840 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9dtfm\" (UniqueName: \"kubernetes.io/projected/cdcaf314-ef4a-4f8e-90fb-a597bcd1f343-kube-api-access-9dtfm\") on node \"crc\" DevicePath \"\"" Dec 03 08:45:16 crc kubenswrapper[4576]: I1203 08:45:16.093863 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sjgq6\" (UniqueName: \"kubernetes.io/projected/c82788b8-bd1f-49c5-8f63-e8e360008f40-kube-api-access-sjgq6\") on node \"crc\" DevicePath \"\"" Dec 03 08:45:16 crc kubenswrapper[4576]: I1203 08:45:16.093884 4576 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c82788b8-bd1f-49c5-8f63-e8e360008f40-client-ca\") on node \"crc\" DevicePath \"\"" Dec 03 08:45:16 crc kubenswrapper[4576]: I1203 08:45:16.093901 4576 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c82788b8-bd1f-49c5-8f63-e8e360008f40-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 08:45:16 crc kubenswrapper[4576]: I1203 08:45:16.093917 4576 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cdcaf314-ef4a-4f8e-90fb-a597bcd1f343-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 08:45:16 crc kubenswrapper[4576]: I1203 08:45:16.093933 4576 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cdcaf314-ef4a-4f8e-90fb-a597bcd1f343-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 03 08:45:16 crc kubenswrapper[4576]: I1203 08:45:16.093951 4576 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cdcaf314-ef4a-4f8e-90fb-a597bcd1f343-config\") on node \"crc\" DevicePath \"\"" Dec 03 08:45:16 crc kubenswrapper[4576]: I1203 08:45:16.093967 4576 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c82788b8-bd1f-49c5-8f63-e8e360008f40-config\") on node \"crc\" DevicePath \"\"" Dec 03 08:45:16 crc kubenswrapper[4576]: I1203 08:45:16.480501 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gnxs8" event={"ID":"c82788b8-bd1f-49c5-8f63-e8e360008f40","Type":"ContainerDied","Data":"0b3dfe1ebcfde9f6d22758a71e03f9a0350d0ec55e47074169890faf0e427cb4"} Dec 03 08:45:16 crc kubenswrapper[4576]: I1203 08:45:16.480505 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gnxs8" Dec 03 08:45:16 crc kubenswrapper[4576]: I1203 08:45:16.480705 4576 scope.go:117] "RemoveContainer" containerID="0b3dfe1ebcfde9f6d22758a71e03f9a0350d0ec55e47074169890faf0e427cb4" Dec 03 08:45:16 crc kubenswrapper[4576]: I1203 08:45:16.480717 4576 generic.go:334] "Generic (PLEG): container finished" podID="c82788b8-bd1f-49c5-8f63-e8e360008f40" containerID="0b3dfe1ebcfde9f6d22758a71e03f9a0350d0ec55e47074169890faf0e427cb4" exitCode=0 Dec 03 08:45:16 crc kubenswrapper[4576]: I1203 08:45:16.480922 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gnxs8" event={"ID":"c82788b8-bd1f-49c5-8f63-e8e360008f40","Type":"ContainerDied","Data":"bd5903104263fc81f3d7d3d4f7bf96e5f9f1ddffa3e09e780522b0162ced15ee"} Dec 03 08:45:16 crc kubenswrapper[4576]: I1203 08:45:16.486990 4576 generic.go:334] "Generic (PLEG): container finished" podID="cdcaf314-ef4a-4f8e-90fb-a597bcd1f343" containerID="fecfd15b802de61a1a9b30fa31bc6bb4786f1286db2343e8da28e32fef4bb18c" exitCode=0 Dec 03 08:45:16 crc kubenswrapper[4576]: I1203 08:45:16.487034 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-km29k" event={"ID":"cdcaf314-ef4a-4f8e-90fb-a597bcd1f343","Type":"ContainerDied","Data":"fecfd15b802de61a1a9b30fa31bc6bb4786f1286db2343e8da28e32fef4bb18c"} Dec 03 08:45:16 crc kubenswrapper[4576]: I1203 08:45:16.487066 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-km29k" event={"ID":"cdcaf314-ef4a-4f8e-90fb-a597bcd1f343","Type":"ContainerDied","Data":"68a752e2102b8abfbce82650c6ed176893712461426027f30fade658a25e5a16"} Dec 03 08:45:16 crc kubenswrapper[4576]: I1203 08:45:16.487126 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-km29k" Dec 03 08:45:16 crc kubenswrapper[4576]: I1203 08:45:16.510604 4576 scope.go:117] "RemoveContainer" containerID="0b3dfe1ebcfde9f6d22758a71e03f9a0350d0ec55e47074169890faf0e427cb4" Dec 03 08:45:16 crc kubenswrapper[4576]: E1203 08:45:16.511071 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b3dfe1ebcfde9f6d22758a71e03f9a0350d0ec55e47074169890faf0e427cb4\": container with ID starting with 0b3dfe1ebcfde9f6d22758a71e03f9a0350d0ec55e47074169890faf0e427cb4 not found: ID does not exist" containerID="0b3dfe1ebcfde9f6d22758a71e03f9a0350d0ec55e47074169890faf0e427cb4" Dec 03 08:45:16 crc kubenswrapper[4576]: I1203 08:45:16.511138 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b3dfe1ebcfde9f6d22758a71e03f9a0350d0ec55e47074169890faf0e427cb4"} err="failed to get container status \"0b3dfe1ebcfde9f6d22758a71e03f9a0350d0ec55e47074169890faf0e427cb4\": rpc error: code = NotFound desc = could not find container \"0b3dfe1ebcfde9f6d22758a71e03f9a0350d0ec55e47074169890faf0e427cb4\": container with ID starting with 0b3dfe1ebcfde9f6d22758a71e03f9a0350d0ec55e47074169890faf0e427cb4 not found: ID does not exist" Dec 03 08:45:16 crc kubenswrapper[4576]: I1203 08:45:16.511173 4576 scope.go:117] "RemoveContainer" containerID="fecfd15b802de61a1a9b30fa31bc6bb4786f1286db2343e8da28e32fef4bb18c" Dec 03 08:45:16 crc kubenswrapper[4576]: I1203 08:45:16.523081 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-gnxs8"] Dec 03 08:45:16 crc kubenswrapper[4576]: I1203 08:45:16.541868 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-gnxs8"] Dec 03 08:45:16 crc kubenswrapper[4576]: I1203 08:45:16.542675 4576 scope.go:117] "RemoveContainer" containerID="fecfd15b802de61a1a9b30fa31bc6bb4786f1286db2343e8da28e32fef4bb18c" Dec 03 08:45:16 crc kubenswrapper[4576]: E1203 08:45:16.543256 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fecfd15b802de61a1a9b30fa31bc6bb4786f1286db2343e8da28e32fef4bb18c\": container with ID starting with fecfd15b802de61a1a9b30fa31bc6bb4786f1286db2343e8da28e32fef4bb18c not found: ID does not exist" containerID="fecfd15b802de61a1a9b30fa31bc6bb4786f1286db2343e8da28e32fef4bb18c" Dec 03 08:45:16 crc kubenswrapper[4576]: I1203 08:45:16.543385 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fecfd15b802de61a1a9b30fa31bc6bb4786f1286db2343e8da28e32fef4bb18c"} err="failed to get container status \"fecfd15b802de61a1a9b30fa31bc6bb4786f1286db2343e8da28e32fef4bb18c\": rpc error: code = NotFound desc = could not find container \"fecfd15b802de61a1a9b30fa31bc6bb4786f1286db2343e8da28e32fef4bb18c\": container with ID starting with fecfd15b802de61a1a9b30fa31bc6bb4786f1286db2343e8da28e32fef4bb18c not found: ID does not exist" Dec 03 08:45:16 crc kubenswrapper[4576]: I1203 08:45:16.545194 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-km29k"] Dec 03 08:45:16 crc kubenswrapper[4576]: I1203 08:45:16.548309 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-km29k"] Dec 03 08:45:17 crc 
kubenswrapper[4576]: I1203 08:45:17.284960 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-596db47495-qmpdq"] Dec 03 08:45:17 crc kubenswrapper[4576]: E1203 08:45:17.285636 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8455c8a5-3cf3-44b7-98dd-9d6564fb6994" containerName="collect-profiles" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.285655 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="8455c8a5-3cf3-44b7-98dd-9d6564fb6994" containerName="collect-profiles" Dec 03 08:45:17 crc kubenswrapper[4576]: E1203 08:45:17.285676 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c82788b8-bd1f-49c5-8f63-e8e360008f40" containerName="route-controller-manager" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.285685 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="c82788b8-bd1f-49c5-8f63-e8e360008f40" containerName="route-controller-manager" Dec 03 08:45:17 crc kubenswrapper[4576]: E1203 08:45:17.285706 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cdcaf314-ef4a-4f8e-90fb-a597bcd1f343" containerName="controller-manager" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.285715 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="cdcaf314-ef4a-4f8e-90fb-a597bcd1f343" containerName="controller-manager" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.285857 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="8455c8a5-3cf3-44b7-98dd-9d6564fb6994" containerName="collect-profiles" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.285876 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="cdcaf314-ef4a-4f8e-90fb-a597bcd1f343" containerName="controller-manager" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.285887 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="c82788b8-bd1f-49c5-8f63-e8e360008f40" containerName="route-controller-manager" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.286631 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-596db47495-qmpdq" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.289340 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.291371 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.291568 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-5cf4f8495c-57q96"] Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.292397 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-5cf4f8495c-57q96" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.293887 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.294051 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.294200 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.295285 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.295691 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.295950 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.296273 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.338588 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-596db47495-qmpdq"] Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.342604 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.342661 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.343103 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.349752 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.361101 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5cf4f8495c-57q96"] Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.435922 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6-config\") pod \"controller-manager-5cf4f8495c-57q96\" (UID: \"6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6\") " pod="openshift-controller-manager/controller-manager-5cf4f8495c-57q96" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.435975 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6-proxy-ca-bundles\") pod \"controller-manager-5cf4f8495c-57q96\" (UID: \"6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6\") " pod="openshift-controller-manager/controller-manager-5cf4f8495c-57q96" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.436420 4576 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6-serving-cert\") pod \"controller-manager-5cf4f8495c-57q96\" (UID: \"6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6\") " pod="openshift-controller-manager/controller-manager-5cf4f8495c-57q96" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.436458 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kdnlf\" (UniqueName: \"kubernetes.io/projected/5a46176a-355f-4cbc-aa73-809fcb32ea1d-kube-api-access-kdnlf\") pod \"route-controller-manager-596db47495-qmpdq\" (UID: \"5a46176a-355f-4cbc-aa73-809fcb32ea1d\") " pod="openshift-route-controller-manager/route-controller-manager-596db47495-qmpdq" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.436482 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-98kph\" (UniqueName: \"kubernetes.io/projected/6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6-kube-api-access-98kph\") pod \"controller-manager-5cf4f8495c-57q96\" (UID: \"6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6\") " pod="openshift-controller-manager/controller-manager-5cf4f8495c-57q96" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.436498 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6-client-ca\") pod \"controller-manager-5cf4f8495c-57q96\" (UID: \"6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6\") " pod="openshift-controller-manager/controller-manager-5cf4f8495c-57q96" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.436606 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5a46176a-355f-4cbc-aa73-809fcb32ea1d-client-ca\") pod \"route-controller-manager-596db47495-qmpdq\" (UID: \"5a46176a-355f-4cbc-aa73-809fcb32ea1d\") " pod="openshift-route-controller-manager/route-controller-manager-596db47495-qmpdq" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.436637 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5a46176a-355f-4cbc-aa73-809fcb32ea1d-serving-cert\") pod \"route-controller-manager-596db47495-qmpdq\" (UID: \"5a46176a-355f-4cbc-aa73-809fcb32ea1d\") " pod="openshift-route-controller-manager/route-controller-manager-596db47495-qmpdq" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.436662 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a46176a-355f-4cbc-aa73-809fcb32ea1d-config\") pod \"route-controller-manager-596db47495-qmpdq\" (UID: \"5a46176a-355f-4cbc-aa73-809fcb32ea1d\") " pod="openshift-route-controller-manager/route-controller-manager-596db47495-qmpdq" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.537976 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5a46176a-355f-4cbc-aa73-809fcb32ea1d-client-ca\") pod \"route-controller-manager-596db47495-qmpdq\" (UID: \"5a46176a-355f-4cbc-aa73-809fcb32ea1d\") " pod="openshift-route-controller-manager/route-controller-manager-596db47495-qmpdq" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.538022 4576 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5a46176a-355f-4cbc-aa73-809fcb32ea1d-serving-cert\") pod \"route-controller-manager-596db47495-qmpdq\" (UID: \"5a46176a-355f-4cbc-aa73-809fcb32ea1d\") " pod="openshift-route-controller-manager/route-controller-manager-596db47495-qmpdq" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.538048 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a46176a-355f-4cbc-aa73-809fcb32ea1d-config\") pod \"route-controller-manager-596db47495-qmpdq\" (UID: \"5a46176a-355f-4cbc-aa73-809fcb32ea1d\") " pod="openshift-route-controller-manager/route-controller-manager-596db47495-qmpdq" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.538080 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6-config\") pod \"controller-manager-5cf4f8495c-57q96\" (UID: \"6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6\") " pod="openshift-controller-manager/controller-manager-5cf4f8495c-57q96" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.538108 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6-proxy-ca-bundles\") pod \"controller-manager-5cf4f8495c-57q96\" (UID: \"6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6\") " pod="openshift-controller-manager/controller-manager-5cf4f8495c-57q96" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.538134 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6-serving-cert\") pod \"controller-manager-5cf4f8495c-57q96\" (UID: \"6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6\") " pod="openshift-controller-manager/controller-manager-5cf4f8495c-57q96" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.538172 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kdnlf\" (UniqueName: \"kubernetes.io/projected/5a46176a-355f-4cbc-aa73-809fcb32ea1d-kube-api-access-kdnlf\") pod \"route-controller-manager-596db47495-qmpdq\" (UID: \"5a46176a-355f-4cbc-aa73-809fcb32ea1d\") " pod="openshift-route-controller-manager/route-controller-manager-596db47495-qmpdq" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.538205 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-98kph\" (UniqueName: \"kubernetes.io/projected/6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6-kube-api-access-98kph\") pod \"controller-manager-5cf4f8495c-57q96\" (UID: \"6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6\") " pod="openshift-controller-manager/controller-manager-5cf4f8495c-57q96" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.538226 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6-client-ca\") pod \"controller-manager-5cf4f8495c-57q96\" (UID: \"6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6\") " pod="openshift-controller-manager/controller-manager-5cf4f8495c-57q96" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.539192 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/5a46176a-355f-4cbc-aa73-809fcb32ea1d-client-ca\") pod \"route-controller-manager-596db47495-qmpdq\" (UID: \"5a46176a-355f-4cbc-aa73-809fcb32ea1d\") " pod="openshift-route-controller-manager/route-controller-manager-596db47495-qmpdq" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.539307 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6-client-ca\") pod \"controller-manager-5cf4f8495c-57q96\" (UID: \"6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6\") " pod="openshift-controller-manager/controller-manager-5cf4f8495c-57q96" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.539463 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a46176a-355f-4cbc-aa73-809fcb32ea1d-config\") pod \"route-controller-manager-596db47495-qmpdq\" (UID: \"5a46176a-355f-4cbc-aa73-809fcb32ea1d\") " pod="openshift-route-controller-manager/route-controller-manager-596db47495-qmpdq" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.540414 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6-config\") pod \"controller-manager-5cf4f8495c-57q96\" (UID: \"6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6\") " pod="openshift-controller-manager/controller-manager-5cf4f8495c-57q96" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.540509 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6-proxy-ca-bundles\") pod \"controller-manager-5cf4f8495c-57q96\" (UID: \"6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6\") " pod="openshift-controller-manager/controller-manager-5cf4f8495c-57q96" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.553633 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5a46176a-355f-4cbc-aa73-809fcb32ea1d-serving-cert\") pod \"route-controller-manager-596db47495-qmpdq\" (UID: \"5a46176a-355f-4cbc-aa73-809fcb32ea1d\") " pod="openshift-route-controller-manager/route-controller-manager-596db47495-qmpdq" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.559184 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-98kph\" (UniqueName: \"kubernetes.io/projected/6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6-kube-api-access-98kph\") pod \"controller-manager-5cf4f8495c-57q96\" (UID: \"6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6\") " pod="openshift-controller-manager/controller-manager-5cf4f8495c-57q96" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.559382 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6-serving-cert\") pod \"controller-manager-5cf4f8495c-57q96\" (UID: \"6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6\") " pod="openshift-controller-manager/controller-manager-5cf4f8495c-57q96" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.584463 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kdnlf\" (UniqueName: \"kubernetes.io/projected/5a46176a-355f-4cbc-aa73-809fcb32ea1d-kube-api-access-kdnlf\") pod \"route-controller-manager-596db47495-qmpdq\" (UID: \"5a46176a-355f-4cbc-aa73-809fcb32ea1d\") " 
pod="openshift-route-controller-manager/route-controller-manager-596db47495-qmpdq" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.657201 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-596db47495-qmpdq" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.666864 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5cf4f8495c-57q96" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.684726 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c82788b8-bd1f-49c5-8f63-e8e360008f40" path="/var/lib/kubelet/pods/c82788b8-bd1f-49c5-8f63-e8e360008f40/volumes" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.685578 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cdcaf314-ef4a-4f8e-90fb-a597bcd1f343" path="/var/lib/kubelet/pods/cdcaf314-ef4a-4f8e-90fb-a597bcd1f343/volumes" Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.927154 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-596db47495-qmpdq"] Dec 03 08:45:17 crc kubenswrapper[4576]: I1203 08:45:17.967606 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5cf4f8495c-57q96"] Dec 03 08:45:17 crc kubenswrapper[4576]: W1203 08:45:17.985827 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6d82f2a3_348f_46bf_8a02_4c3f42eaf4b6.slice/crio-3519a763fe6672c90ab7d4eca0ca3a98207c254e3e9d585f3ae567db3788a05d WatchSource:0}: Error finding container 3519a763fe6672c90ab7d4eca0ca3a98207c254e3e9d585f3ae567db3788a05d: Status 404 returned error can't find the container with id 3519a763fe6672c90ab7d4eca0ca3a98207c254e3e9d585f3ae567db3788a05d Dec 03 08:45:18 crc kubenswrapper[4576]: I1203 08:45:18.508351 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5cf4f8495c-57q96" event={"ID":"6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6","Type":"ContainerStarted","Data":"b1f088a2b366aca4e9d81d29af8ba80268685238322ca74567e16cf46b99aa03"} Dec 03 08:45:18 crc kubenswrapper[4576]: I1203 08:45:18.508715 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5cf4f8495c-57q96" event={"ID":"6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6","Type":"ContainerStarted","Data":"3519a763fe6672c90ab7d4eca0ca3a98207c254e3e9d585f3ae567db3788a05d"} Dec 03 08:45:18 crc kubenswrapper[4576]: I1203 08:45:18.508743 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-5cf4f8495c-57q96" Dec 03 08:45:18 crc kubenswrapper[4576]: I1203 08:45:18.510637 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-596db47495-qmpdq" event={"ID":"5a46176a-355f-4cbc-aa73-809fcb32ea1d","Type":"ContainerStarted","Data":"e63c9f217984ecea8aca95338085cd3af9481206ad0ceecfb54a1a103bbef806"} Dec 03 08:45:18 crc kubenswrapper[4576]: I1203 08:45:18.510772 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-596db47495-qmpdq" Dec 03 08:45:18 crc kubenswrapper[4576]: I1203 08:45:18.510842 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-route-controller-manager/route-controller-manager-596db47495-qmpdq" event={"ID":"5a46176a-355f-4cbc-aa73-809fcb32ea1d","Type":"ContainerStarted","Data":"46528bda3d93b4158ef5f994d079dc84c0d0764020163b62491846576a37824a"} Dec 03 08:45:18 crc kubenswrapper[4576]: I1203 08:45:18.518055 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-5cf4f8495c-57q96" Dec 03 08:45:18 crc kubenswrapper[4576]: I1203 08:45:18.521715 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-596db47495-qmpdq" Dec 03 08:45:18 crc kubenswrapper[4576]: I1203 08:45:18.533195 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-5cf4f8495c-57q96" podStartSLOduration=3.533157574 podStartE2EDuration="3.533157574s" podCreationTimestamp="2025-12-03 08:45:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:45:18.530950829 +0000 UTC m=+325.916927813" watchObservedRunningTime="2025-12-03 08:45:18.533157574 +0000 UTC m=+325.919134558" Dec 03 08:45:18 crc kubenswrapper[4576]: I1203 08:45:18.600396 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-596db47495-qmpdq" podStartSLOduration=3.600378451 podStartE2EDuration="3.600378451s" podCreationTimestamp="2025-12-03 08:45:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:45:18.599938429 +0000 UTC m=+325.985915433" watchObservedRunningTime="2025-12-03 08:45:18.600378451 +0000 UTC m=+325.986355425" Dec 03 08:45:46 crc kubenswrapper[4576]: I1203 08:45:46.502874 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-5cf4f8495c-57q96"] Dec 03 08:45:46 crc kubenswrapper[4576]: I1203 08:45:46.503864 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-5cf4f8495c-57q96" podUID="6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6" containerName="controller-manager" containerID="cri-o://b1f088a2b366aca4e9d81d29af8ba80268685238322ca74567e16cf46b99aa03" gracePeriod=30 Dec 03 08:45:46 crc kubenswrapper[4576]: I1203 08:45:46.647206 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-596db47495-qmpdq"] Dec 03 08:45:46 crc kubenswrapper[4576]: I1203 08:45:46.647429 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-596db47495-qmpdq" podUID="5a46176a-355f-4cbc-aa73-809fcb32ea1d" containerName="route-controller-manager" containerID="cri-o://e63c9f217984ecea8aca95338085cd3af9481206ad0ceecfb54a1a103bbef806" gracePeriod=30 Dec 03 08:45:46 crc kubenswrapper[4576]: I1203 08:45:46.758235 4576 generic.go:334] "Generic (PLEG): container finished" podID="6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6" containerID="b1f088a2b366aca4e9d81d29af8ba80268685238322ca74567e16cf46b99aa03" exitCode=0 Dec 03 08:45:46 crc kubenswrapper[4576]: I1203 08:45:46.758318 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5cf4f8495c-57q96" 
event={"ID":"6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6","Type":"ContainerDied","Data":"b1f088a2b366aca4e9d81d29af8ba80268685238322ca74567e16cf46b99aa03"} Dec 03 08:45:47 crc kubenswrapper[4576]: I1203 08:45:47.348089 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5cf4f8495c-57q96" Dec 03 08:45:47 crc kubenswrapper[4576]: I1203 08:45:47.351016 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-596db47495-qmpdq" Dec 03 08:45:47 crc kubenswrapper[4576]: I1203 08:45:47.432034 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6-proxy-ca-bundles\") pod \"6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6\" (UID: \"6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6\") " Dec 03 08:45:47 crc kubenswrapper[4576]: I1203 08:45:47.432097 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6-client-ca\") pod \"6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6\" (UID: \"6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6\") " Dec 03 08:45:47 crc kubenswrapper[4576]: I1203 08:45:47.432129 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-98kph\" (UniqueName: \"kubernetes.io/projected/6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6-kube-api-access-98kph\") pod \"6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6\" (UID: \"6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6\") " Dec 03 08:45:47 crc kubenswrapper[4576]: I1203 08:45:47.432155 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6-config\") pod \"6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6\" (UID: \"6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6\") " Dec 03 08:45:47 crc kubenswrapper[4576]: I1203 08:45:47.432195 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a46176a-355f-4cbc-aa73-809fcb32ea1d-config\") pod \"5a46176a-355f-4cbc-aa73-809fcb32ea1d\" (UID: \"5a46176a-355f-4cbc-aa73-809fcb32ea1d\") " Dec 03 08:45:47 crc kubenswrapper[4576]: I1203 08:45:47.432258 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5a46176a-355f-4cbc-aa73-809fcb32ea1d-client-ca\") pod \"5a46176a-355f-4cbc-aa73-809fcb32ea1d\" (UID: \"5a46176a-355f-4cbc-aa73-809fcb32ea1d\") " Dec 03 08:45:47 crc kubenswrapper[4576]: I1203 08:45:47.432305 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6-serving-cert\") pod \"6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6\" (UID: \"6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6\") " Dec 03 08:45:47 crc kubenswrapper[4576]: I1203 08:45:47.432326 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5a46176a-355f-4cbc-aa73-809fcb32ea1d-serving-cert\") pod \"5a46176a-355f-4cbc-aa73-809fcb32ea1d\" (UID: \"5a46176a-355f-4cbc-aa73-809fcb32ea1d\") " Dec 03 08:45:47 crc kubenswrapper[4576]: I1203 08:45:47.432352 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kdnlf\" 
(UniqueName: \"kubernetes.io/projected/5a46176a-355f-4cbc-aa73-809fcb32ea1d-kube-api-access-kdnlf\") pod \"5a46176a-355f-4cbc-aa73-809fcb32ea1d\" (UID: \"5a46176a-355f-4cbc-aa73-809fcb32ea1d\") " Dec 03 08:45:47 crc kubenswrapper[4576]: I1203 08:45:47.437720 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6-config" (OuterVolumeSpecName: "config") pod "6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6" (UID: "6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:45:47 crc kubenswrapper[4576]: I1203 08:45:47.438162 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5a46176a-355f-4cbc-aa73-809fcb32ea1d-config" (OuterVolumeSpecName: "config") pod "5a46176a-355f-4cbc-aa73-809fcb32ea1d" (UID: "5a46176a-355f-4cbc-aa73-809fcb32ea1d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:45:47 crc kubenswrapper[4576]: I1203 08:45:47.437767 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5a46176a-355f-4cbc-aa73-809fcb32ea1d-client-ca" (OuterVolumeSpecName: "client-ca") pod "5a46176a-355f-4cbc-aa73-809fcb32ea1d" (UID: "5a46176a-355f-4cbc-aa73-809fcb32ea1d"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:45:47 crc kubenswrapper[4576]: I1203 08:45:47.442892 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6" (UID: "6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:45:47 crc kubenswrapper[4576]: I1203 08:45:47.443331 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6-kube-api-access-98kph" (OuterVolumeSpecName: "kube-api-access-98kph") pod "6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6" (UID: "6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6"). InnerVolumeSpecName "kube-api-access-98kph". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:45:47 crc kubenswrapper[4576]: I1203 08:45:47.443766 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a46176a-355f-4cbc-aa73-809fcb32ea1d-kube-api-access-kdnlf" (OuterVolumeSpecName: "kube-api-access-kdnlf") pod "5a46176a-355f-4cbc-aa73-809fcb32ea1d" (UID: "5a46176a-355f-4cbc-aa73-809fcb32ea1d"). InnerVolumeSpecName "kube-api-access-kdnlf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:45:47 crc kubenswrapper[4576]: I1203 08:45:47.443852 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6-client-ca" (OuterVolumeSpecName: "client-ca") pod "6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6" (UID: "6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:45:47 crc kubenswrapper[4576]: I1203 08:45:47.444894 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6" (UID: "6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:45:47 crc kubenswrapper[4576]: I1203 08:45:47.447871 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a46176a-355f-4cbc-aa73-809fcb32ea1d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5a46176a-355f-4cbc-aa73-809fcb32ea1d" (UID: "5a46176a-355f-4cbc-aa73-809fcb32ea1d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:45:47 crc kubenswrapper[4576]: I1203 08:45:47.533620 4576 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 08:45:47 crc kubenswrapper[4576]: I1203 08:45:47.533662 4576 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5a46176a-355f-4cbc-aa73-809fcb32ea1d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 08:45:47 crc kubenswrapper[4576]: I1203 08:45:47.533680 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kdnlf\" (UniqueName: \"kubernetes.io/projected/5a46176a-355f-4cbc-aa73-809fcb32ea1d-kube-api-access-kdnlf\") on node \"crc\" DevicePath \"\"" Dec 03 08:45:47 crc kubenswrapper[4576]: I1203 08:45:47.533692 4576 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 03 08:45:47 crc kubenswrapper[4576]: I1203 08:45:47.533702 4576 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6-client-ca\") on node \"crc\" DevicePath \"\"" Dec 03 08:45:47 crc kubenswrapper[4576]: I1203 08:45:47.533710 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-98kph\" (UniqueName: \"kubernetes.io/projected/6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6-kube-api-access-98kph\") on node \"crc\" DevicePath \"\"" Dec 03 08:45:47 crc kubenswrapper[4576]: I1203 08:45:47.533720 4576 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6-config\") on node \"crc\" DevicePath \"\"" Dec 03 08:45:47 crc kubenswrapper[4576]: I1203 08:45:47.533727 4576 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a46176a-355f-4cbc-aa73-809fcb32ea1d-config\") on node \"crc\" DevicePath \"\"" Dec 03 08:45:47 crc kubenswrapper[4576]: I1203 08:45:47.533735 4576 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5a46176a-355f-4cbc-aa73-809fcb32ea1d-client-ca\") on node \"crc\" DevicePath \"\"" Dec 03 08:45:47 crc kubenswrapper[4576]: I1203 08:45:47.766266 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-5cf4f8495c-57q96" Dec 03 08:45:47 crc kubenswrapper[4576]: I1203 08:45:47.766272 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5cf4f8495c-57q96" event={"ID":"6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6","Type":"ContainerDied","Data":"3519a763fe6672c90ab7d4eca0ca3a98207c254e3e9d585f3ae567db3788a05d"} Dec 03 08:45:47 crc kubenswrapper[4576]: I1203 08:45:47.766787 4576 scope.go:117] "RemoveContainer" containerID="b1f088a2b366aca4e9d81d29af8ba80268685238322ca74567e16cf46b99aa03" Dec 03 08:45:47 crc kubenswrapper[4576]: I1203 08:45:47.768318 4576 generic.go:334] "Generic (PLEG): container finished" podID="5a46176a-355f-4cbc-aa73-809fcb32ea1d" containerID="e63c9f217984ecea8aca95338085cd3af9481206ad0ceecfb54a1a103bbef806" exitCode=0 Dec 03 08:45:47 crc kubenswrapper[4576]: I1203 08:45:47.768354 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-596db47495-qmpdq" event={"ID":"5a46176a-355f-4cbc-aa73-809fcb32ea1d","Type":"ContainerDied","Data":"e63c9f217984ecea8aca95338085cd3af9481206ad0ceecfb54a1a103bbef806"} Dec 03 08:45:47 crc kubenswrapper[4576]: I1203 08:45:47.768379 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-596db47495-qmpdq" event={"ID":"5a46176a-355f-4cbc-aa73-809fcb32ea1d","Type":"ContainerDied","Data":"46528bda3d93b4158ef5f994d079dc84c0d0764020163b62491846576a37824a"} Dec 03 08:45:47 crc kubenswrapper[4576]: I1203 08:45:47.768362 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-596db47495-qmpdq" Dec 03 08:45:47 crc kubenswrapper[4576]: I1203 08:45:47.792001 4576 scope.go:117] "RemoveContainer" containerID="e63c9f217984ecea8aca95338085cd3af9481206ad0ceecfb54a1a103bbef806" Dec 03 08:45:47 crc kubenswrapper[4576]: I1203 08:45:47.812386 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-5cf4f8495c-57q96"] Dec 03 08:45:47 crc kubenswrapper[4576]: I1203 08:45:47.816906 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-5cf4f8495c-57q96"] Dec 03 08:45:47 crc kubenswrapper[4576]: I1203 08:45:47.822134 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-596db47495-qmpdq"] Dec 03 08:45:47 crc kubenswrapper[4576]: I1203 08:45:47.822399 4576 scope.go:117] "RemoveContainer" containerID="e63c9f217984ecea8aca95338085cd3af9481206ad0ceecfb54a1a103bbef806" Dec 03 08:45:47 crc kubenswrapper[4576]: E1203 08:45:47.823067 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e63c9f217984ecea8aca95338085cd3af9481206ad0ceecfb54a1a103bbef806\": container with ID starting with e63c9f217984ecea8aca95338085cd3af9481206ad0ceecfb54a1a103bbef806 not found: ID does not exist" containerID="e63c9f217984ecea8aca95338085cd3af9481206ad0ceecfb54a1a103bbef806" Dec 03 08:45:47 crc kubenswrapper[4576]: I1203 08:45:47.823111 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e63c9f217984ecea8aca95338085cd3af9481206ad0ceecfb54a1a103bbef806"} err="failed to get container status \"e63c9f217984ecea8aca95338085cd3af9481206ad0ceecfb54a1a103bbef806\": rpc error: code 
= NotFound desc = could not find container \"e63c9f217984ecea8aca95338085cd3af9481206ad0ceecfb54a1a103bbef806\": container with ID starting with e63c9f217984ecea8aca95338085cd3af9481206ad0ceecfb54a1a103bbef806 not found: ID does not exist" Dec 03 08:45:47 crc kubenswrapper[4576]: I1203 08:45:47.826787 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-596db47495-qmpdq"] Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.303088 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f9fc89966-lr8ch"] Dec 03 08:45:48 crc kubenswrapper[4576]: E1203 08:45:48.303409 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a46176a-355f-4cbc-aa73-809fcb32ea1d" containerName="route-controller-manager" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.303432 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a46176a-355f-4cbc-aa73-809fcb32ea1d" containerName="route-controller-manager" Dec 03 08:45:48 crc kubenswrapper[4576]: E1203 08:45:48.303500 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6" containerName="controller-manager" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.303510 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6" containerName="controller-manager" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.303663 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a46176a-355f-4cbc-aa73-809fcb32ea1d" containerName="route-controller-manager" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.303681 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6" containerName="controller-manager" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.304221 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7f9fc89966-lr8ch" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.308180 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.308480 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.308670 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.309054 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.308773 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.309319 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.310018 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-77f667dfdd-4c4xt"] Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.310989 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-77f667dfdd-4c4xt" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.316686 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.316978 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.317384 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.317753 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.318110 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.319426 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.328817 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f9fc89966-lr8ch"] Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.332024 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.332887 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-77f667dfdd-4c4xt"] Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.361589 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9e711530-8991-4a6b-b90d-192439079fc4-serving-cert\") pod \"controller-manager-77f667dfdd-4c4xt\" (UID: \"9e711530-8991-4a6b-b90d-192439079fc4\") " pod="openshift-controller-manager/controller-manager-77f667dfdd-4c4xt" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.361636 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6xsg9\" (UniqueName: \"kubernetes.io/projected/9e711530-8991-4a6b-b90d-192439079fc4-kube-api-access-6xsg9\") pod \"controller-manager-77f667dfdd-4c4xt\" (UID: \"9e711530-8991-4a6b-b90d-192439079fc4\") " pod="openshift-controller-manager/controller-manager-77f667dfdd-4c4xt" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.361687 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e711530-8991-4a6b-b90d-192439079fc4-config\") pod \"controller-manager-77f667dfdd-4c4xt\" (UID: \"9e711530-8991-4a6b-b90d-192439079fc4\") " pod="openshift-controller-manager/controller-manager-77f667dfdd-4c4xt" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.361721 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9e711530-8991-4a6b-b90d-192439079fc4-proxy-ca-bundles\") pod \"controller-manager-77f667dfdd-4c4xt\" (UID: \"9e711530-8991-4a6b-b90d-192439079fc4\") " pod="openshift-controller-manager/controller-manager-77f667dfdd-4c4xt" Dec 03 
08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.361752 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9e711530-8991-4a6b-b90d-192439079fc4-client-ca\") pod \"controller-manager-77f667dfdd-4c4xt\" (UID: \"9e711530-8991-4a6b-b90d-192439079fc4\") " pod="openshift-controller-manager/controller-manager-77f667dfdd-4c4xt" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.362097 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/339cdbc2-2da8-4340-89e3-12003a03e985-serving-cert\") pod \"route-controller-manager-7f9fc89966-lr8ch\" (UID: \"339cdbc2-2da8-4340-89e3-12003a03e985\") " pod="openshift-route-controller-manager/route-controller-manager-7f9fc89966-lr8ch" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.362133 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/339cdbc2-2da8-4340-89e3-12003a03e985-config\") pod \"route-controller-manager-7f9fc89966-lr8ch\" (UID: \"339cdbc2-2da8-4340-89e3-12003a03e985\") " pod="openshift-route-controller-manager/route-controller-manager-7f9fc89966-lr8ch" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.362197 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7fmjk\" (UniqueName: \"kubernetes.io/projected/339cdbc2-2da8-4340-89e3-12003a03e985-kube-api-access-7fmjk\") pod \"route-controller-manager-7f9fc89966-lr8ch\" (UID: \"339cdbc2-2da8-4340-89e3-12003a03e985\") " pod="openshift-route-controller-manager/route-controller-manager-7f9fc89966-lr8ch" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.362214 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/339cdbc2-2da8-4340-89e3-12003a03e985-client-ca\") pod \"route-controller-manager-7f9fc89966-lr8ch\" (UID: \"339cdbc2-2da8-4340-89e3-12003a03e985\") " pod="openshift-route-controller-manager/route-controller-manager-7f9fc89966-lr8ch" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.463002 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7fmjk\" (UniqueName: \"kubernetes.io/projected/339cdbc2-2da8-4340-89e3-12003a03e985-kube-api-access-7fmjk\") pod \"route-controller-manager-7f9fc89966-lr8ch\" (UID: \"339cdbc2-2da8-4340-89e3-12003a03e985\") " pod="openshift-route-controller-manager/route-controller-manager-7f9fc89966-lr8ch" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.463047 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/339cdbc2-2da8-4340-89e3-12003a03e985-client-ca\") pod \"route-controller-manager-7f9fc89966-lr8ch\" (UID: \"339cdbc2-2da8-4340-89e3-12003a03e985\") " pod="openshift-route-controller-manager/route-controller-manager-7f9fc89966-lr8ch" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.463077 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9e711530-8991-4a6b-b90d-192439079fc4-serving-cert\") pod \"controller-manager-77f667dfdd-4c4xt\" (UID: \"9e711530-8991-4a6b-b90d-192439079fc4\") " pod="openshift-controller-manager/controller-manager-77f667dfdd-4c4xt" Dec 03 
08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.463104 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6xsg9\" (UniqueName: \"kubernetes.io/projected/9e711530-8991-4a6b-b90d-192439079fc4-kube-api-access-6xsg9\") pod \"controller-manager-77f667dfdd-4c4xt\" (UID: \"9e711530-8991-4a6b-b90d-192439079fc4\") " pod="openshift-controller-manager/controller-manager-77f667dfdd-4c4xt" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.463139 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e711530-8991-4a6b-b90d-192439079fc4-config\") pod \"controller-manager-77f667dfdd-4c4xt\" (UID: \"9e711530-8991-4a6b-b90d-192439079fc4\") " pod="openshift-controller-manager/controller-manager-77f667dfdd-4c4xt" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.463158 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9e711530-8991-4a6b-b90d-192439079fc4-proxy-ca-bundles\") pod \"controller-manager-77f667dfdd-4c4xt\" (UID: \"9e711530-8991-4a6b-b90d-192439079fc4\") " pod="openshift-controller-manager/controller-manager-77f667dfdd-4c4xt" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.463185 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9e711530-8991-4a6b-b90d-192439079fc4-client-ca\") pod \"controller-manager-77f667dfdd-4c4xt\" (UID: \"9e711530-8991-4a6b-b90d-192439079fc4\") " pod="openshift-controller-manager/controller-manager-77f667dfdd-4c4xt" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.463212 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/339cdbc2-2da8-4340-89e3-12003a03e985-serving-cert\") pod \"route-controller-manager-7f9fc89966-lr8ch\" (UID: \"339cdbc2-2da8-4340-89e3-12003a03e985\") " pod="openshift-route-controller-manager/route-controller-manager-7f9fc89966-lr8ch" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.463237 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/339cdbc2-2da8-4340-89e3-12003a03e985-config\") pod \"route-controller-manager-7f9fc89966-lr8ch\" (UID: \"339cdbc2-2da8-4340-89e3-12003a03e985\") " pod="openshift-route-controller-manager/route-controller-manager-7f9fc89966-lr8ch" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.464609 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/339cdbc2-2da8-4340-89e3-12003a03e985-client-ca\") pod \"route-controller-manager-7f9fc89966-lr8ch\" (UID: \"339cdbc2-2da8-4340-89e3-12003a03e985\") " pod="openshift-route-controller-manager/route-controller-manager-7f9fc89966-lr8ch" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.464912 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9e711530-8991-4a6b-b90d-192439079fc4-proxy-ca-bundles\") pod \"controller-manager-77f667dfdd-4c4xt\" (UID: \"9e711530-8991-4a6b-b90d-192439079fc4\") " pod="openshift-controller-manager/controller-manager-77f667dfdd-4c4xt" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.465177 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/339cdbc2-2da8-4340-89e3-12003a03e985-config\") pod \"route-controller-manager-7f9fc89966-lr8ch\" (UID: \"339cdbc2-2da8-4340-89e3-12003a03e985\") " pod="openshift-route-controller-manager/route-controller-manager-7f9fc89966-lr8ch" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.465227 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9e711530-8991-4a6b-b90d-192439079fc4-client-ca\") pod \"controller-manager-77f667dfdd-4c4xt\" (UID: \"9e711530-8991-4a6b-b90d-192439079fc4\") " pod="openshift-controller-manager/controller-manager-77f667dfdd-4c4xt" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.465582 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e711530-8991-4a6b-b90d-192439079fc4-config\") pod \"controller-manager-77f667dfdd-4c4xt\" (UID: \"9e711530-8991-4a6b-b90d-192439079fc4\") " pod="openshift-controller-manager/controller-manager-77f667dfdd-4c4xt" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.468231 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/339cdbc2-2da8-4340-89e3-12003a03e985-serving-cert\") pod \"route-controller-manager-7f9fc89966-lr8ch\" (UID: \"339cdbc2-2da8-4340-89e3-12003a03e985\") " pod="openshift-route-controller-manager/route-controller-manager-7f9fc89966-lr8ch" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.469107 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9e711530-8991-4a6b-b90d-192439079fc4-serving-cert\") pod \"controller-manager-77f667dfdd-4c4xt\" (UID: \"9e711530-8991-4a6b-b90d-192439079fc4\") " pod="openshift-controller-manager/controller-manager-77f667dfdd-4c4xt" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.483233 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6xsg9\" (UniqueName: \"kubernetes.io/projected/9e711530-8991-4a6b-b90d-192439079fc4-kube-api-access-6xsg9\") pod \"controller-manager-77f667dfdd-4c4xt\" (UID: \"9e711530-8991-4a6b-b90d-192439079fc4\") " pod="openshift-controller-manager/controller-manager-77f667dfdd-4c4xt" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.485751 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7fmjk\" (UniqueName: \"kubernetes.io/projected/339cdbc2-2da8-4340-89e3-12003a03e985-kube-api-access-7fmjk\") pod \"route-controller-manager-7f9fc89966-lr8ch\" (UID: \"339cdbc2-2da8-4340-89e3-12003a03e985\") " pod="openshift-route-controller-manager/route-controller-manager-7f9fc89966-lr8ch" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.625567 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7f9fc89966-lr8ch" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.669117 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-77f667dfdd-4c4xt" Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.885332 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f9fc89966-lr8ch"] Dec 03 08:45:48 crc kubenswrapper[4576]: I1203 08:45:48.931742 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-77f667dfdd-4c4xt"] Dec 03 08:45:48 crc kubenswrapper[4576]: W1203 08:45:48.950077 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9e711530_8991_4a6b_b90d_192439079fc4.slice/crio-7c20398336f87fffd0550cd11464ef7d4be66af82b1cd6c3c63926fc35c16e6f WatchSource:0}: Error finding container 7c20398336f87fffd0550cd11464ef7d4be66af82b1cd6c3c63926fc35c16e6f: Status 404 returned error can't find the container with id 7c20398336f87fffd0550cd11464ef7d4be66af82b1cd6c3c63926fc35c16e6f Dec 03 08:45:49 crc kubenswrapper[4576]: I1203 08:45:49.687388 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5a46176a-355f-4cbc-aa73-809fcb32ea1d" path="/var/lib/kubelet/pods/5a46176a-355f-4cbc-aa73-809fcb32ea1d/volumes" Dec 03 08:45:49 crc kubenswrapper[4576]: I1203 08:45:49.688593 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6" path="/var/lib/kubelet/pods/6d82f2a3-348f-46bf-8a02-4c3f42eaf4b6/volumes" Dec 03 08:45:49 crc kubenswrapper[4576]: I1203 08:45:49.789174 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-77f667dfdd-4c4xt" event={"ID":"9e711530-8991-4a6b-b90d-192439079fc4","Type":"ContainerStarted","Data":"b992be8de0b5f51dd8ddd9365d7308c5ac1f9ec09ae9d1e100e7555b557c11eb"} Dec 03 08:45:49 crc kubenswrapper[4576]: I1203 08:45:49.789221 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-77f667dfdd-4c4xt" event={"ID":"9e711530-8991-4a6b-b90d-192439079fc4","Type":"ContainerStarted","Data":"7c20398336f87fffd0550cd11464ef7d4be66af82b1cd6c3c63926fc35c16e6f"} Dec 03 08:45:49 crc kubenswrapper[4576]: I1203 08:45:49.789678 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-77f667dfdd-4c4xt" Dec 03 08:45:49 crc kubenswrapper[4576]: I1203 08:45:49.791109 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7f9fc89966-lr8ch" event={"ID":"339cdbc2-2da8-4340-89e3-12003a03e985","Type":"ContainerStarted","Data":"995ca5b4c71ea9b959b3a94c96c85bb7d53da2efe0745fa2fef1c6102f38e88d"} Dec 03 08:45:49 crc kubenswrapper[4576]: I1203 08:45:49.791140 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7f9fc89966-lr8ch" event={"ID":"339cdbc2-2da8-4340-89e3-12003a03e985","Type":"ContainerStarted","Data":"7a918fb5c4678106d83844b32d0c8eaa133ee6eef0e7a80d5240f4a7f4056dc1"} Dec 03 08:45:49 crc kubenswrapper[4576]: I1203 08:45:49.791309 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7f9fc89966-lr8ch" Dec 03 08:45:49 crc kubenswrapper[4576]: I1203 08:45:49.803816 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-77f667dfdd-4c4xt" Dec 03 
08:45:49 crc kubenswrapper[4576]: I1203 08:45:49.806796 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7f9fc89966-lr8ch" Dec 03 08:45:49 crc kubenswrapper[4576]: I1203 08:45:49.889496 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7f9fc89966-lr8ch" podStartSLOduration=3.8894460950000003 podStartE2EDuration="3.889446095s" podCreationTimestamp="2025-12-03 08:45:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:45:49.889429635 +0000 UTC m=+357.275406689" watchObservedRunningTime="2025-12-03 08:45:49.889446095 +0000 UTC m=+357.275423079" Dec 03 08:45:49 crc kubenswrapper[4576]: I1203 08:45:49.893684 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-77f667dfdd-4c4xt" podStartSLOduration=3.893658773 podStartE2EDuration="3.893658773s" podCreationTimestamp="2025-12-03 08:45:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:45:49.815557607 +0000 UTC m=+357.201534601" watchObservedRunningTime="2025-12-03 08:45:49.893658773 +0000 UTC m=+357.279635757" Dec 03 08:45:53 crc kubenswrapper[4576]: I1203 08:45:53.570412 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f9fc89966-lr8ch"] Dec 03 08:45:53 crc kubenswrapper[4576]: I1203 08:45:53.571080 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-7f9fc89966-lr8ch" podUID="339cdbc2-2da8-4340-89e3-12003a03e985" containerName="route-controller-manager" containerID="cri-o://995ca5b4c71ea9b959b3a94c96c85bb7d53da2efe0745fa2fef1c6102f38e88d" gracePeriod=30 Dec 03 08:45:53 crc kubenswrapper[4576]: I1203 08:45:53.815118 4576 generic.go:334] "Generic (PLEG): container finished" podID="339cdbc2-2da8-4340-89e3-12003a03e985" containerID="995ca5b4c71ea9b959b3a94c96c85bb7d53da2efe0745fa2fef1c6102f38e88d" exitCode=0 Dec 03 08:45:53 crc kubenswrapper[4576]: I1203 08:45:53.815189 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7f9fc89966-lr8ch" event={"ID":"339cdbc2-2da8-4340-89e3-12003a03e985","Type":"ContainerDied","Data":"995ca5b4c71ea9b959b3a94c96c85bb7d53da2efe0745fa2fef1c6102f38e88d"} Dec 03 08:45:54 crc kubenswrapper[4576]: I1203 08:45:54.022842 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7f9fc89966-lr8ch" Dec 03 08:45:54 crc kubenswrapper[4576]: I1203 08:45:54.119748 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/339cdbc2-2da8-4340-89e3-12003a03e985-serving-cert\") pod \"339cdbc2-2da8-4340-89e3-12003a03e985\" (UID: \"339cdbc2-2da8-4340-89e3-12003a03e985\") " Dec 03 08:45:54 crc kubenswrapper[4576]: I1203 08:45:54.119815 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/339cdbc2-2da8-4340-89e3-12003a03e985-config\") pod \"339cdbc2-2da8-4340-89e3-12003a03e985\" (UID: \"339cdbc2-2da8-4340-89e3-12003a03e985\") " Dec 03 08:45:54 crc kubenswrapper[4576]: I1203 08:45:54.119853 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/339cdbc2-2da8-4340-89e3-12003a03e985-client-ca\") pod \"339cdbc2-2da8-4340-89e3-12003a03e985\" (UID: \"339cdbc2-2da8-4340-89e3-12003a03e985\") " Dec 03 08:45:54 crc kubenswrapper[4576]: I1203 08:45:54.119885 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7fmjk\" (UniqueName: \"kubernetes.io/projected/339cdbc2-2da8-4340-89e3-12003a03e985-kube-api-access-7fmjk\") pod \"339cdbc2-2da8-4340-89e3-12003a03e985\" (UID: \"339cdbc2-2da8-4340-89e3-12003a03e985\") " Dec 03 08:45:54 crc kubenswrapper[4576]: I1203 08:45:54.120708 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/339cdbc2-2da8-4340-89e3-12003a03e985-client-ca" (OuterVolumeSpecName: "client-ca") pod "339cdbc2-2da8-4340-89e3-12003a03e985" (UID: "339cdbc2-2da8-4340-89e3-12003a03e985"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:45:54 crc kubenswrapper[4576]: I1203 08:45:54.120733 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/339cdbc2-2da8-4340-89e3-12003a03e985-config" (OuterVolumeSpecName: "config") pod "339cdbc2-2da8-4340-89e3-12003a03e985" (UID: "339cdbc2-2da8-4340-89e3-12003a03e985"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:45:54 crc kubenswrapper[4576]: I1203 08:45:54.125951 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/339cdbc2-2da8-4340-89e3-12003a03e985-kube-api-access-7fmjk" (OuterVolumeSpecName: "kube-api-access-7fmjk") pod "339cdbc2-2da8-4340-89e3-12003a03e985" (UID: "339cdbc2-2da8-4340-89e3-12003a03e985"). InnerVolumeSpecName "kube-api-access-7fmjk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:45:54 crc kubenswrapper[4576]: I1203 08:45:54.132738 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/339cdbc2-2da8-4340-89e3-12003a03e985-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "339cdbc2-2da8-4340-89e3-12003a03e985" (UID: "339cdbc2-2da8-4340-89e3-12003a03e985"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:45:54 crc kubenswrapper[4576]: I1203 08:45:54.220724 4576 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/339cdbc2-2da8-4340-89e3-12003a03e985-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 08:45:54 crc kubenswrapper[4576]: I1203 08:45:54.220769 4576 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/339cdbc2-2da8-4340-89e3-12003a03e985-config\") on node \"crc\" DevicePath \"\"" Dec 03 08:45:54 crc kubenswrapper[4576]: I1203 08:45:54.220782 4576 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/339cdbc2-2da8-4340-89e3-12003a03e985-client-ca\") on node \"crc\" DevicePath \"\"" Dec 03 08:45:54 crc kubenswrapper[4576]: I1203 08:45:54.220794 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7fmjk\" (UniqueName: \"kubernetes.io/projected/339cdbc2-2da8-4340-89e3-12003a03e985-kube-api-access-7fmjk\") on node \"crc\" DevicePath \"\"" Dec 03 08:45:54 crc kubenswrapper[4576]: I1203 08:45:54.821839 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7f9fc89966-lr8ch" event={"ID":"339cdbc2-2da8-4340-89e3-12003a03e985","Type":"ContainerDied","Data":"7a918fb5c4678106d83844b32d0c8eaa133ee6eef0e7a80d5240f4a7f4056dc1"} Dec 03 08:45:54 crc kubenswrapper[4576]: I1203 08:45:54.821895 4576 scope.go:117] "RemoveContainer" containerID="995ca5b4c71ea9b959b3a94c96c85bb7d53da2efe0745fa2fef1c6102f38e88d" Dec 03 08:45:54 crc kubenswrapper[4576]: I1203 08:45:54.821999 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7f9fc89966-lr8ch" Dec 03 08:45:54 crc kubenswrapper[4576]: I1203 08:45:54.855240 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f9fc89966-lr8ch"] Dec 03 08:45:54 crc kubenswrapper[4576]: I1203 08:45:54.858564 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f9fc89966-lr8ch"] Dec 03 08:45:55 crc kubenswrapper[4576]: I1203 08:45:55.309819 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-78ddcbb79d-p6t6j"] Dec 03 08:45:55 crc kubenswrapper[4576]: E1203 08:45:55.310414 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="339cdbc2-2da8-4340-89e3-12003a03e985" containerName="route-controller-manager" Dec 03 08:45:55 crc kubenswrapper[4576]: I1203 08:45:55.310608 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="339cdbc2-2da8-4340-89e3-12003a03e985" containerName="route-controller-manager" Dec 03 08:45:55 crc kubenswrapper[4576]: I1203 08:45:55.310911 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="339cdbc2-2da8-4340-89e3-12003a03e985" containerName="route-controller-manager" Dec 03 08:45:55 crc kubenswrapper[4576]: I1203 08:45:55.311641 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-78ddcbb79d-p6t6j" Dec 03 08:45:55 crc kubenswrapper[4576]: I1203 08:45:55.313737 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 03 08:45:55 crc kubenswrapper[4576]: I1203 08:45:55.313843 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 03 08:45:55 crc kubenswrapper[4576]: I1203 08:45:55.314016 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 03 08:45:55 crc kubenswrapper[4576]: I1203 08:45:55.314052 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 03 08:45:55 crc kubenswrapper[4576]: I1203 08:45:55.315610 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 03 08:45:55 crc kubenswrapper[4576]: I1203 08:45:55.321851 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 03 08:45:55 crc kubenswrapper[4576]: I1203 08:45:55.322407 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-78ddcbb79d-p6t6j"] Dec 03 08:45:55 crc kubenswrapper[4576]: I1203 08:45:55.434806 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d7d9502-499c-4244-803e-ccaf86de865c-serving-cert\") pod \"route-controller-manager-78ddcbb79d-p6t6j\" (UID: \"9d7d9502-499c-4244-803e-ccaf86de865c\") " pod="openshift-route-controller-manager/route-controller-manager-78ddcbb79d-p6t6j" Dec 03 08:45:55 crc kubenswrapper[4576]: I1203 08:45:55.435157 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9d7d9502-499c-4244-803e-ccaf86de865c-client-ca\") pod \"route-controller-manager-78ddcbb79d-p6t6j\" (UID: \"9d7d9502-499c-4244-803e-ccaf86de865c\") " pod="openshift-route-controller-manager/route-controller-manager-78ddcbb79d-p6t6j" Dec 03 08:45:55 crc kubenswrapper[4576]: I1203 08:45:55.435217 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bx68r\" (UniqueName: \"kubernetes.io/projected/9d7d9502-499c-4244-803e-ccaf86de865c-kube-api-access-bx68r\") pod \"route-controller-manager-78ddcbb79d-p6t6j\" (UID: \"9d7d9502-499c-4244-803e-ccaf86de865c\") " pod="openshift-route-controller-manager/route-controller-manager-78ddcbb79d-p6t6j" Dec 03 08:45:55 crc kubenswrapper[4576]: I1203 08:45:55.435259 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d7d9502-499c-4244-803e-ccaf86de865c-config\") pod \"route-controller-manager-78ddcbb79d-p6t6j\" (UID: \"9d7d9502-499c-4244-803e-ccaf86de865c\") " pod="openshift-route-controller-manager/route-controller-manager-78ddcbb79d-p6t6j" Dec 03 08:45:55 crc kubenswrapper[4576]: I1203 08:45:55.536391 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9d7d9502-499c-4244-803e-ccaf86de865c-client-ca\") pod 
\"route-controller-manager-78ddcbb79d-p6t6j\" (UID: \"9d7d9502-499c-4244-803e-ccaf86de865c\") " pod="openshift-route-controller-manager/route-controller-manager-78ddcbb79d-p6t6j" Dec 03 08:45:55 crc kubenswrapper[4576]: I1203 08:45:55.536463 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bx68r\" (UniqueName: \"kubernetes.io/projected/9d7d9502-499c-4244-803e-ccaf86de865c-kube-api-access-bx68r\") pod \"route-controller-manager-78ddcbb79d-p6t6j\" (UID: \"9d7d9502-499c-4244-803e-ccaf86de865c\") " pod="openshift-route-controller-manager/route-controller-manager-78ddcbb79d-p6t6j" Dec 03 08:45:55 crc kubenswrapper[4576]: I1203 08:45:55.536509 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d7d9502-499c-4244-803e-ccaf86de865c-config\") pod \"route-controller-manager-78ddcbb79d-p6t6j\" (UID: \"9d7d9502-499c-4244-803e-ccaf86de865c\") " pod="openshift-route-controller-manager/route-controller-manager-78ddcbb79d-p6t6j" Dec 03 08:45:55 crc kubenswrapper[4576]: I1203 08:45:55.536576 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d7d9502-499c-4244-803e-ccaf86de865c-serving-cert\") pod \"route-controller-manager-78ddcbb79d-p6t6j\" (UID: \"9d7d9502-499c-4244-803e-ccaf86de865c\") " pod="openshift-route-controller-manager/route-controller-manager-78ddcbb79d-p6t6j" Dec 03 08:45:55 crc kubenswrapper[4576]: I1203 08:45:55.537309 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9d7d9502-499c-4244-803e-ccaf86de865c-client-ca\") pod \"route-controller-manager-78ddcbb79d-p6t6j\" (UID: \"9d7d9502-499c-4244-803e-ccaf86de865c\") " pod="openshift-route-controller-manager/route-controller-manager-78ddcbb79d-p6t6j" Dec 03 08:45:55 crc kubenswrapper[4576]: I1203 08:45:55.537865 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d7d9502-499c-4244-803e-ccaf86de865c-config\") pod \"route-controller-manager-78ddcbb79d-p6t6j\" (UID: \"9d7d9502-499c-4244-803e-ccaf86de865c\") " pod="openshift-route-controller-manager/route-controller-manager-78ddcbb79d-p6t6j" Dec 03 08:45:55 crc kubenswrapper[4576]: I1203 08:45:55.544624 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d7d9502-499c-4244-803e-ccaf86de865c-serving-cert\") pod \"route-controller-manager-78ddcbb79d-p6t6j\" (UID: \"9d7d9502-499c-4244-803e-ccaf86de865c\") " pod="openshift-route-controller-manager/route-controller-manager-78ddcbb79d-p6t6j" Dec 03 08:45:55 crc kubenswrapper[4576]: I1203 08:45:55.558051 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bx68r\" (UniqueName: \"kubernetes.io/projected/9d7d9502-499c-4244-803e-ccaf86de865c-kube-api-access-bx68r\") pod \"route-controller-manager-78ddcbb79d-p6t6j\" (UID: \"9d7d9502-499c-4244-803e-ccaf86de865c\") " pod="openshift-route-controller-manager/route-controller-manager-78ddcbb79d-p6t6j" Dec 03 08:45:55 crc kubenswrapper[4576]: I1203 08:45:55.634572 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-78ddcbb79d-p6t6j" Dec 03 08:45:55 crc kubenswrapper[4576]: I1203 08:45:55.686193 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="339cdbc2-2da8-4340-89e3-12003a03e985" path="/var/lib/kubelet/pods/339cdbc2-2da8-4340-89e3-12003a03e985/volumes" Dec 03 08:45:56 crc kubenswrapper[4576]: W1203 08:45:56.087109 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d7d9502_499c_4244_803e_ccaf86de865c.slice/crio-23fbacf05de9b0af32a311499f3129413d40a74d6c48e0e050582df79e6a3af7 WatchSource:0}: Error finding container 23fbacf05de9b0af32a311499f3129413d40a74d6c48e0e050582df79e6a3af7: Status 404 returned error can't find the container with id 23fbacf05de9b0af32a311499f3129413d40a74d6c48e0e050582df79e6a3af7 Dec 03 08:45:56 crc kubenswrapper[4576]: I1203 08:45:56.087223 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-78ddcbb79d-p6t6j"] Dec 03 08:45:56 crc kubenswrapper[4576]: I1203 08:45:56.837448 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-78ddcbb79d-p6t6j" event={"ID":"9d7d9502-499c-4244-803e-ccaf86de865c","Type":"ContainerStarted","Data":"221779c889e5f9916150af237fedafd2ef854abfc18faa6869a8d13e0c070fc3"} Dec 03 08:45:56 crc kubenswrapper[4576]: I1203 08:45:56.837516 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-78ddcbb79d-p6t6j" event={"ID":"9d7d9502-499c-4244-803e-ccaf86de865c","Type":"ContainerStarted","Data":"23fbacf05de9b0af32a311499f3129413d40a74d6c48e0e050582df79e6a3af7"} Dec 03 08:45:56 crc kubenswrapper[4576]: I1203 08:45:56.838623 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-78ddcbb79d-p6t6j" Dec 03 08:45:56 crc kubenswrapper[4576]: I1203 08:45:56.842912 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-78ddcbb79d-p6t6j" Dec 03 08:45:56 crc kubenswrapper[4576]: I1203 08:45:56.860764 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-78ddcbb79d-p6t6j" podStartSLOduration=3.860736957 podStartE2EDuration="3.860736957s" podCreationTimestamp="2025-12-03 08:45:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:45:56.8580057 +0000 UTC m=+364.243982694" watchObservedRunningTime="2025-12-03 08:45:56.860736957 +0000 UTC m=+364.246713961" Dec 03 08:46:09 crc kubenswrapper[4576]: I1203 08:46:09.682094 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:46:09 crc kubenswrapper[4576]: I1203 08:46:09.682809 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" Dec 03 08:46:15 crc kubenswrapper[4576]: I1203 08:46:15.371940 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-hqv4k"] Dec 03 08:46:15 crc kubenswrapper[4576]: I1203 08:46:15.373201 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-hqv4k" Dec 03 08:46:15 crc kubenswrapper[4576]: I1203 08:46:15.388693 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-hqv4k"] Dec 03 08:46:15 crc kubenswrapper[4576]: I1203 08:46:15.538670 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/37c13054-e843-419b-9692-3d794db5260f-registry-certificates\") pod \"image-registry-66df7c8f76-hqv4k\" (UID: \"37c13054-e843-419b-9692-3d794db5260f\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqv4k" Dec 03 08:46:15 crc kubenswrapper[4576]: I1203 08:46:15.538979 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/37c13054-e843-419b-9692-3d794db5260f-bound-sa-token\") pod \"image-registry-66df7c8f76-hqv4k\" (UID: \"37c13054-e843-419b-9692-3d794db5260f\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqv4k" Dec 03 08:46:15 crc kubenswrapper[4576]: I1203 08:46:15.539127 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/37c13054-e843-419b-9692-3d794db5260f-registry-tls\") pod \"image-registry-66df7c8f76-hqv4k\" (UID: \"37c13054-e843-419b-9692-3d794db5260f\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqv4k" Dec 03 08:46:15 crc kubenswrapper[4576]: I1203 08:46:15.539275 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-plb6r\" (UniqueName: \"kubernetes.io/projected/37c13054-e843-419b-9692-3d794db5260f-kube-api-access-plb6r\") pod \"image-registry-66df7c8f76-hqv4k\" (UID: \"37c13054-e843-419b-9692-3d794db5260f\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqv4k" Dec 03 08:46:15 crc kubenswrapper[4576]: I1203 08:46:15.539408 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/37c13054-e843-419b-9692-3d794db5260f-trusted-ca\") pod \"image-registry-66df7c8f76-hqv4k\" (UID: \"37c13054-e843-419b-9692-3d794db5260f\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqv4k" Dec 03 08:46:15 crc kubenswrapper[4576]: I1203 08:46:15.539539 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/37c13054-e843-419b-9692-3d794db5260f-ca-trust-extracted\") pod \"image-registry-66df7c8f76-hqv4k\" (UID: \"37c13054-e843-419b-9692-3d794db5260f\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqv4k" Dec 03 08:46:15 crc kubenswrapper[4576]: I1203 08:46:15.539693 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-hqv4k\" (UID: 
\"37c13054-e843-419b-9692-3d794db5260f\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqv4k" Dec 03 08:46:15 crc kubenswrapper[4576]: I1203 08:46:15.539811 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/37c13054-e843-419b-9692-3d794db5260f-installation-pull-secrets\") pod \"image-registry-66df7c8f76-hqv4k\" (UID: \"37c13054-e843-419b-9692-3d794db5260f\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqv4k" Dec 03 08:46:15 crc kubenswrapper[4576]: I1203 08:46:15.568290 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-hqv4k\" (UID: \"37c13054-e843-419b-9692-3d794db5260f\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqv4k" Dec 03 08:46:15 crc kubenswrapper[4576]: I1203 08:46:15.641313 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-plb6r\" (UniqueName: \"kubernetes.io/projected/37c13054-e843-419b-9692-3d794db5260f-kube-api-access-plb6r\") pod \"image-registry-66df7c8f76-hqv4k\" (UID: \"37c13054-e843-419b-9692-3d794db5260f\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqv4k" Dec 03 08:46:15 crc kubenswrapper[4576]: I1203 08:46:15.641373 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/37c13054-e843-419b-9692-3d794db5260f-trusted-ca\") pod \"image-registry-66df7c8f76-hqv4k\" (UID: \"37c13054-e843-419b-9692-3d794db5260f\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqv4k" Dec 03 08:46:15 crc kubenswrapper[4576]: I1203 08:46:15.641413 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/37c13054-e843-419b-9692-3d794db5260f-ca-trust-extracted\") pod \"image-registry-66df7c8f76-hqv4k\" (UID: \"37c13054-e843-419b-9692-3d794db5260f\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqv4k" Dec 03 08:46:15 crc kubenswrapper[4576]: I1203 08:46:15.641475 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/37c13054-e843-419b-9692-3d794db5260f-installation-pull-secrets\") pod \"image-registry-66df7c8f76-hqv4k\" (UID: \"37c13054-e843-419b-9692-3d794db5260f\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqv4k" Dec 03 08:46:15 crc kubenswrapper[4576]: I1203 08:46:15.641626 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/37c13054-e843-419b-9692-3d794db5260f-registry-certificates\") pod \"image-registry-66df7c8f76-hqv4k\" (UID: \"37c13054-e843-419b-9692-3d794db5260f\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqv4k" Dec 03 08:46:15 crc kubenswrapper[4576]: I1203 08:46:15.641664 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/37c13054-e843-419b-9692-3d794db5260f-bound-sa-token\") pod \"image-registry-66df7c8f76-hqv4k\" (UID: \"37c13054-e843-419b-9692-3d794db5260f\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqv4k" Dec 03 08:46:15 crc kubenswrapper[4576]: I1203 
08:46:15.641698 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/37c13054-e843-419b-9692-3d794db5260f-registry-tls\") pod \"image-registry-66df7c8f76-hqv4k\" (UID: \"37c13054-e843-419b-9692-3d794db5260f\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqv4k" Dec 03 08:46:15 crc kubenswrapper[4576]: I1203 08:46:15.642359 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/37c13054-e843-419b-9692-3d794db5260f-ca-trust-extracted\") pod \"image-registry-66df7c8f76-hqv4k\" (UID: \"37c13054-e843-419b-9692-3d794db5260f\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqv4k" Dec 03 08:46:15 crc kubenswrapper[4576]: I1203 08:46:15.643092 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/37c13054-e843-419b-9692-3d794db5260f-registry-certificates\") pod \"image-registry-66df7c8f76-hqv4k\" (UID: \"37c13054-e843-419b-9692-3d794db5260f\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqv4k" Dec 03 08:46:15 crc kubenswrapper[4576]: I1203 08:46:15.643746 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/37c13054-e843-419b-9692-3d794db5260f-trusted-ca\") pod \"image-registry-66df7c8f76-hqv4k\" (UID: \"37c13054-e843-419b-9692-3d794db5260f\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqv4k" Dec 03 08:46:15 crc kubenswrapper[4576]: I1203 08:46:15.648084 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/37c13054-e843-419b-9692-3d794db5260f-installation-pull-secrets\") pod \"image-registry-66df7c8f76-hqv4k\" (UID: \"37c13054-e843-419b-9692-3d794db5260f\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqv4k" Dec 03 08:46:15 crc kubenswrapper[4576]: I1203 08:46:15.652105 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/37c13054-e843-419b-9692-3d794db5260f-registry-tls\") pod \"image-registry-66df7c8f76-hqv4k\" (UID: \"37c13054-e843-419b-9692-3d794db5260f\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqv4k" Dec 03 08:46:15 crc kubenswrapper[4576]: I1203 08:46:15.662923 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-plb6r\" (UniqueName: \"kubernetes.io/projected/37c13054-e843-419b-9692-3d794db5260f-kube-api-access-plb6r\") pod \"image-registry-66df7c8f76-hqv4k\" (UID: \"37c13054-e843-419b-9692-3d794db5260f\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqv4k" Dec 03 08:46:15 crc kubenswrapper[4576]: I1203 08:46:15.675555 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/37c13054-e843-419b-9692-3d794db5260f-bound-sa-token\") pod \"image-registry-66df7c8f76-hqv4k\" (UID: \"37c13054-e843-419b-9692-3d794db5260f\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqv4k" Dec 03 08:46:15 crc kubenswrapper[4576]: I1203 08:46:15.690467 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-hqv4k" Dec 03 08:46:16 crc kubenswrapper[4576]: I1203 08:46:16.133952 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-hqv4k"] Dec 03 08:46:16 crc kubenswrapper[4576]: I1203 08:46:16.947899 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-hqv4k" event={"ID":"37c13054-e843-419b-9692-3d794db5260f","Type":"ContainerStarted","Data":"e30d4f4a630543e07b4594b95d9bacf801bda7efd5566406e05299cc390bc519"} Dec 03 08:46:16 crc kubenswrapper[4576]: I1203 08:46:16.947944 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-hqv4k" event={"ID":"37c13054-e843-419b-9692-3d794db5260f","Type":"ContainerStarted","Data":"82b3df325945a87795c11e013a60b456cafbc92b3f45c620d9c3ac59e6f62bfb"} Dec 03 08:46:16 crc kubenswrapper[4576]: I1203 08:46:16.949466 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-hqv4k" Dec 03 08:46:16 crc kubenswrapper[4576]: I1203 08:46:16.979571 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-hqv4k" podStartSLOduration=1.979544486 podStartE2EDuration="1.979544486s" podCreationTimestamp="2025-12-03 08:46:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:46:16.976875112 +0000 UTC m=+384.362852106" watchObservedRunningTime="2025-12-03 08:46:16.979544486 +0000 UTC m=+384.365521470" Dec 03 08:46:26 crc kubenswrapper[4576]: I1203 08:46:26.431828 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-77f667dfdd-4c4xt"] Dec 03 08:46:26 crc kubenswrapper[4576]: I1203 08:46:26.432886 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-77f667dfdd-4c4xt" podUID="9e711530-8991-4a6b-b90d-192439079fc4" containerName="controller-manager" containerID="cri-o://b992be8de0b5f51dd8ddd9365d7308c5ac1f9ec09ae9d1e100e7555b557c11eb" gracePeriod=30 Dec 03 08:46:27 crc kubenswrapper[4576]: I1203 08:46:27.117086 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-77f667dfdd-4c4xt" event={"ID":"9e711530-8991-4a6b-b90d-192439079fc4","Type":"ContainerDied","Data":"b992be8de0b5f51dd8ddd9365d7308c5ac1f9ec09ae9d1e100e7555b557c11eb"} Dec 03 08:46:27 crc kubenswrapper[4576]: I1203 08:46:27.117053 4576 generic.go:334] "Generic (PLEG): container finished" podID="9e711530-8991-4a6b-b90d-192439079fc4" containerID="b992be8de0b5f51dd8ddd9365d7308c5ac1f9ec09ae9d1e100e7555b557c11eb" exitCode=0 Dec 03 08:46:27 crc kubenswrapper[4576]: I1203 08:46:27.355215 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-77f667dfdd-4c4xt" Dec 03 08:46:27 crc kubenswrapper[4576]: I1203 08:46:27.534367 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9e711530-8991-4a6b-b90d-192439079fc4-serving-cert\") pod \"9e711530-8991-4a6b-b90d-192439079fc4\" (UID: \"9e711530-8991-4a6b-b90d-192439079fc4\") " Dec 03 08:46:27 crc kubenswrapper[4576]: I1203 08:46:27.534446 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e711530-8991-4a6b-b90d-192439079fc4-config\") pod \"9e711530-8991-4a6b-b90d-192439079fc4\" (UID: \"9e711530-8991-4a6b-b90d-192439079fc4\") " Dec 03 08:46:27 crc kubenswrapper[4576]: I1203 08:46:27.534507 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6xsg9\" (UniqueName: \"kubernetes.io/projected/9e711530-8991-4a6b-b90d-192439079fc4-kube-api-access-6xsg9\") pod \"9e711530-8991-4a6b-b90d-192439079fc4\" (UID: \"9e711530-8991-4a6b-b90d-192439079fc4\") " Dec 03 08:46:27 crc kubenswrapper[4576]: I1203 08:46:27.534551 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9e711530-8991-4a6b-b90d-192439079fc4-proxy-ca-bundles\") pod \"9e711530-8991-4a6b-b90d-192439079fc4\" (UID: \"9e711530-8991-4a6b-b90d-192439079fc4\") " Dec 03 08:46:27 crc kubenswrapper[4576]: I1203 08:46:27.534648 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9e711530-8991-4a6b-b90d-192439079fc4-client-ca\") pod \"9e711530-8991-4a6b-b90d-192439079fc4\" (UID: \"9e711530-8991-4a6b-b90d-192439079fc4\") " Dec 03 08:46:27 crc kubenswrapper[4576]: I1203 08:46:27.535716 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e711530-8991-4a6b-b90d-192439079fc4-client-ca" (OuterVolumeSpecName: "client-ca") pod "9e711530-8991-4a6b-b90d-192439079fc4" (UID: "9e711530-8991-4a6b-b90d-192439079fc4"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:46:27 crc kubenswrapper[4576]: I1203 08:46:27.535755 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e711530-8991-4a6b-b90d-192439079fc4-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "9e711530-8991-4a6b-b90d-192439079fc4" (UID: "9e711530-8991-4a6b-b90d-192439079fc4"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:46:27 crc kubenswrapper[4576]: I1203 08:46:27.535785 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e711530-8991-4a6b-b90d-192439079fc4-config" (OuterVolumeSpecName: "config") pod "9e711530-8991-4a6b-b90d-192439079fc4" (UID: "9e711530-8991-4a6b-b90d-192439079fc4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:46:27 crc kubenswrapper[4576]: I1203 08:46:27.540664 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e711530-8991-4a6b-b90d-192439079fc4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9e711530-8991-4a6b-b90d-192439079fc4" (UID: "9e711530-8991-4a6b-b90d-192439079fc4"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:46:27 crc kubenswrapper[4576]: I1203 08:46:27.540729 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e711530-8991-4a6b-b90d-192439079fc4-kube-api-access-6xsg9" (OuterVolumeSpecName: "kube-api-access-6xsg9") pod "9e711530-8991-4a6b-b90d-192439079fc4" (UID: "9e711530-8991-4a6b-b90d-192439079fc4"). InnerVolumeSpecName "kube-api-access-6xsg9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:46:27 crc kubenswrapper[4576]: I1203 08:46:27.636100 4576 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9e711530-8991-4a6b-b90d-192439079fc4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 08:46:27 crc kubenswrapper[4576]: I1203 08:46:27.636134 4576 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e711530-8991-4a6b-b90d-192439079fc4-config\") on node \"crc\" DevicePath \"\"" Dec 03 08:46:27 crc kubenswrapper[4576]: I1203 08:46:27.636146 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6xsg9\" (UniqueName: \"kubernetes.io/projected/9e711530-8991-4a6b-b90d-192439079fc4-kube-api-access-6xsg9\") on node \"crc\" DevicePath \"\"" Dec 03 08:46:27 crc kubenswrapper[4576]: I1203 08:46:27.636156 4576 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9e711530-8991-4a6b-b90d-192439079fc4-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 03 08:46:27 crc kubenswrapper[4576]: I1203 08:46:27.636166 4576 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9e711530-8991-4a6b-b90d-192439079fc4-client-ca\") on node \"crc\" DevicePath \"\"" Dec 03 08:46:28 crc kubenswrapper[4576]: I1203 08:46:28.125243 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-77f667dfdd-4c4xt" event={"ID":"9e711530-8991-4a6b-b90d-192439079fc4","Type":"ContainerDied","Data":"7c20398336f87fffd0550cd11464ef7d4be66af82b1cd6c3c63926fc35c16e6f"} Dec 03 08:46:28 crc kubenswrapper[4576]: I1203 08:46:28.125307 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-77f667dfdd-4c4xt" Dec 03 08:46:28 crc kubenswrapper[4576]: I1203 08:46:28.125345 4576 scope.go:117] "RemoveContainer" containerID="b992be8de0b5f51dd8ddd9365d7308c5ac1f9ec09ae9d1e100e7555b557c11eb" Dec 03 08:46:28 crc kubenswrapper[4576]: I1203 08:46:28.143903 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-77f667dfdd-4c4xt"] Dec 03 08:46:28 crc kubenswrapper[4576]: I1203 08:46:28.152414 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-77f667dfdd-4c4xt"] Dec 03 08:46:28 crc kubenswrapper[4576]: I1203 08:46:28.436447 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-5584579fdf-v2mxq"] Dec 03 08:46:28 crc kubenswrapper[4576]: E1203 08:46:28.437167 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e711530-8991-4a6b-b90d-192439079fc4" containerName="controller-manager" Dec 03 08:46:28 crc kubenswrapper[4576]: I1203 08:46:28.437190 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e711530-8991-4a6b-b90d-192439079fc4" containerName="controller-manager" Dec 03 08:46:28 crc kubenswrapper[4576]: I1203 08:46:28.437317 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e711530-8991-4a6b-b90d-192439079fc4" containerName="controller-manager" Dec 03 08:46:28 crc kubenswrapper[4576]: I1203 08:46:28.438044 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5584579fdf-v2mxq" Dec 03 08:46:28 crc kubenswrapper[4576]: I1203 08:46:28.440416 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 03 08:46:28 crc kubenswrapper[4576]: I1203 08:46:28.440688 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 03 08:46:28 crc kubenswrapper[4576]: I1203 08:46:28.440894 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 03 08:46:28 crc kubenswrapper[4576]: I1203 08:46:28.441192 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 03 08:46:28 crc kubenswrapper[4576]: I1203 08:46:28.442382 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 03 08:46:28 crc kubenswrapper[4576]: I1203 08:46:28.443770 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 03 08:46:28 crc kubenswrapper[4576]: I1203 08:46:28.447372 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 03 08:46:28 crc kubenswrapper[4576]: I1203 08:46:28.455128 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5584579fdf-v2mxq"] Dec 03 08:46:28 crc kubenswrapper[4576]: I1203 08:46:28.568519 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d8d9926d-5d1b-4271-90f6-5d6f4aa37921-proxy-ca-bundles\") pod \"controller-manager-5584579fdf-v2mxq\" (UID: \"d8d9926d-5d1b-4271-90f6-5d6f4aa37921\") " 
pod="openshift-controller-manager/controller-manager-5584579fdf-v2mxq" Dec 03 08:46:28 crc kubenswrapper[4576]: I1203 08:46:28.569499 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d8d9926d-5d1b-4271-90f6-5d6f4aa37921-client-ca\") pod \"controller-manager-5584579fdf-v2mxq\" (UID: \"d8d9926d-5d1b-4271-90f6-5d6f4aa37921\") " pod="openshift-controller-manager/controller-manager-5584579fdf-v2mxq" Dec 03 08:46:28 crc kubenswrapper[4576]: I1203 08:46:28.569638 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wjjkl\" (UniqueName: \"kubernetes.io/projected/d8d9926d-5d1b-4271-90f6-5d6f4aa37921-kube-api-access-wjjkl\") pod \"controller-manager-5584579fdf-v2mxq\" (UID: \"d8d9926d-5d1b-4271-90f6-5d6f4aa37921\") " pod="openshift-controller-manager/controller-manager-5584579fdf-v2mxq" Dec 03 08:46:28 crc kubenswrapper[4576]: I1203 08:46:28.569743 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d8d9926d-5d1b-4271-90f6-5d6f4aa37921-config\") pod \"controller-manager-5584579fdf-v2mxq\" (UID: \"d8d9926d-5d1b-4271-90f6-5d6f4aa37921\") " pod="openshift-controller-manager/controller-manager-5584579fdf-v2mxq" Dec 03 08:46:28 crc kubenswrapper[4576]: I1203 08:46:28.569866 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d8d9926d-5d1b-4271-90f6-5d6f4aa37921-serving-cert\") pod \"controller-manager-5584579fdf-v2mxq\" (UID: \"d8d9926d-5d1b-4271-90f6-5d6f4aa37921\") " pod="openshift-controller-manager/controller-manager-5584579fdf-v2mxq" Dec 03 08:46:28 crc kubenswrapper[4576]: I1203 08:46:28.670657 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d8d9926d-5d1b-4271-90f6-5d6f4aa37921-proxy-ca-bundles\") pod \"controller-manager-5584579fdf-v2mxq\" (UID: \"d8d9926d-5d1b-4271-90f6-5d6f4aa37921\") " pod="openshift-controller-manager/controller-manager-5584579fdf-v2mxq" Dec 03 08:46:28 crc kubenswrapper[4576]: I1203 08:46:28.670737 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d8d9926d-5d1b-4271-90f6-5d6f4aa37921-client-ca\") pod \"controller-manager-5584579fdf-v2mxq\" (UID: \"d8d9926d-5d1b-4271-90f6-5d6f4aa37921\") " pod="openshift-controller-manager/controller-manager-5584579fdf-v2mxq" Dec 03 08:46:28 crc kubenswrapper[4576]: I1203 08:46:28.670766 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wjjkl\" (UniqueName: \"kubernetes.io/projected/d8d9926d-5d1b-4271-90f6-5d6f4aa37921-kube-api-access-wjjkl\") pod \"controller-manager-5584579fdf-v2mxq\" (UID: \"d8d9926d-5d1b-4271-90f6-5d6f4aa37921\") " pod="openshift-controller-manager/controller-manager-5584579fdf-v2mxq" Dec 03 08:46:28 crc kubenswrapper[4576]: I1203 08:46:28.670791 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d8d9926d-5d1b-4271-90f6-5d6f4aa37921-config\") pod \"controller-manager-5584579fdf-v2mxq\" (UID: \"d8d9926d-5d1b-4271-90f6-5d6f4aa37921\") " pod="openshift-controller-manager/controller-manager-5584579fdf-v2mxq" Dec 03 08:46:28 crc kubenswrapper[4576]: I1203 
08:46:28.670852 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d8d9926d-5d1b-4271-90f6-5d6f4aa37921-serving-cert\") pod \"controller-manager-5584579fdf-v2mxq\" (UID: \"d8d9926d-5d1b-4271-90f6-5d6f4aa37921\") " pod="openshift-controller-manager/controller-manager-5584579fdf-v2mxq" Dec 03 08:46:28 crc kubenswrapper[4576]: I1203 08:46:28.671855 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d8d9926d-5d1b-4271-90f6-5d6f4aa37921-client-ca\") pod \"controller-manager-5584579fdf-v2mxq\" (UID: \"d8d9926d-5d1b-4271-90f6-5d6f4aa37921\") " pod="openshift-controller-manager/controller-manager-5584579fdf-v2mxq" Dec 03 08:46:28 crc kubenswrapper[4576]: I1203 08:46:28.672303 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d8d9926d-5d1b-4271-90f6-5d6f4aa37921-config\") pod \"controller-manager-5584579fdf-v2mxq\" (UID: \"d8d9926d-5d1b-4271-90f6-5d6f4aa37921\") " pod="openshift-controller-manager/controller-manager-5584579fdf-v2mxq" Dec 03 08:46:28 crc kubenswrapper[4576]: I1203 08:46:28.673110 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d8d9926d-5d1b-4271-90f6-5d6f4aa37921-proxy-ca-bundles\") pod \"controller-manager-5584579fdf-v2mxq\" (UID: \"d8d9926d-5d1b-4271-90f6-5d6f4aa37921\") " pod="openshift-controller-manager/controller-manager-5584579fdf-v2mxq" Dec 03 08:46:28 crc kubenswrapper[4576]: I1203 08:46:28.676926 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d8d9926d-5d1b-4271-90f6-5d6f4aa37921-serving-cert\") pod \"controller-manager-5584579fdf-v2mxq\" (UID: \"d8d9926d-5d1b-4271-90f6-5d6f4aa37921\") " pod="openshift-controller-manager/controller-manager-5584579fdf-v2mxq" Dec 03 08:46:28 crc kubenswrapper[4576]: I1203 08:46:28.720199 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wjjkl\" (UniqueName: \"kubernetes.io/projected/d8d9926d-5d1b-4271-90f6-5d6f4aa37921-kube-api-access-wjjkl\") pod \"controller-manager-5584579fdf-v2mxq\" (UID: \"d8d9926d-5d1b-4271-90f6-5d6f4aa37921\") " pod="openshift-controller-manager/controller-manager-5584579fdf-v2mxq" Dec 03 08:46:28 crc kubenswrapper[4576]: I1203 08:46:28.757103 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-5584579fdf-v2mxq" Dec 03 08:46:28 crc kubenswrapper[4576]: I1203 08:46:28.970808 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5584579fdf-v2mxq"] Dec 03 08:46:29 crc kubenswrapper[4576]: I1203 08:46:29.136575 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5584579fdf-v2mxq" event={"ID":"d8d9926d-5d1b-4271-90f6-5d6f4aa37921","Type":"ContainerStarted","Data":"14b1d96f2d3424bf40970de2e23c2cf0a6e19194c39db1cce6d411711d14e3b1"} Dec 03 08:46:29 crc kubenswrapper[4576]: I1203 08:46:29.136651 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5584579fdf-v2mxq" event={"ID":"d8d9926d-5d1b-4271-90f6-5d6f4aa37921","Type":"ContainerStarted","Data":"7310d320bf2f7c96b7564ec5050f87d85943bf2df0f93d142583ee5bf763804b"} Dec 03 08:46:29 crc kubenswrapper[4576]: I1203 08:46:29.136922 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-5584579fdf-v2mxq" Dec 03 08:46:29 crc kubenswrapper[4576]: I1203 08:46:29.146716 4576 patch_prober.go:28] interesting pod/controller-manager-5584579fdf-v2mxq container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.65:8443/healthz\": dial tcp 10.217.0.65:8443: connect: connection refused" start-of-body= Dec 03 08:46:29 crc kubenswrapper[4576]: I1203 08:46:29.146819 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-5584579fdf-v2mxq" podUID="d8d9926d-5d1b-4271-90f6-5d6f4aa37921" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.65:8443/healthz\": dial tcp 10.217.0.65:8443: connect: connection refused" Dec 03 08:46:29 crc kubenswrapper[4576]: I1203 08:46:29.158481 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-5584579fdf-v2mxq" podStartSLOduration=3.158362567 podStartE2EDuration="3.158362567s" podCreationTimestamp="2025-12-03 08:46:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:46:29.156825965 +0000 UTC m=+396.542802999" watchObservedRunningTime="2025-12-03 08:46:29.158362567 +0000 UTC m=+396.544339561" Dec 03 08:46:29 crc kubenswrapper[4576]: I1203 08:46:29.683869 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e711530-8991-4a6b-b90d-192439079fc4" path="/var/lib/kubelet/pods/9e711530-8991-4a6b-b90d-192439079fc4/volumes" Dec 03 08:46:30 crc kubenswrapper[4576]: I1203 08:46:30.147795 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-5584579fdf-v2mxq" Dec 03 08:46:35 crc kubenswrapper[4576]: I1203 08:46:35.698445 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-hqv4k" Dec 03 08:46:35 crc kubenswrapper[4576]: I1203 08:46:35.755566 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-hg98m"] Dec 03 08:46:37 crc kubenswrapper[4576]: I1203 08:46:37.665584 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gmpkg"] Dec 03 08:46:37 
crc kubenswrapper[4576]: I1203 08:46:37.667986 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-gmpkg" podUID="b040fd94-da16-41e4-8fdf-66b7a8e05d87" containerName="registry-server" containerID="cri-o://9b3bd9c10f35901b401f565cb3d769555bd31516c74c5ee017f3267e8b8be99a" gracePeriod=30 Dec 03 08:46:37 crc kubenswrapper[4576]: I1203 08:46:37.686928 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rxb6h"] Dec 03 08:46:37 crc kubenswrapper[4576]: I1203 08:46:37.687251 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-rxb6h" podUID="711712fe-5398-42c5-bff6-d8f984c47764" containerName="registry-server" containerID="cri-o://7b4a4265abf22e3f3d5d5f479cce79a6e4038d9d11a2b95d0b88c634b34b3126" gracePeriod=30 Dec 03 08:46:37 crc kubenswrapper[4576]: I1203 08:46:37.693267 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mrptv"] Dec 03 08:46:37 crc kubenswrapper[4576]: I1203 08:46:37.693552 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-mrptv" podUID="2541618c-f550-48ef-9316-77a5dd7f1084" containerName="marketplace-operator" containerID="cri-o://351871c68a336dcd75ae9cbe6efcaf9655728aeeef7d2a90d7a367479728e674" gracePeriod=30 Dec 03 08:46:37 crc kubenswrapper[4576]: I1203 08:46:37.705733 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lcs7f"] Dec 03 08:46:37 crc kubenswrapper[4576]: I1203 08:46:37.705978 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-lcs7f" podUID="b420867b-5c27-4b98-a2ab-0dd31175f5c0" containerName="registry-server" containerID="cri-o://5977173bfde796ab362a15efe5192dfc4331b1a451156464272ffe6bb921352d" gracePeriod=30 Dec 03 08:46:37 crc kubenswrapper[4576]: I1203 08:46:37.710891 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2ngh2"] Dec 03 08:46:37 crc kubenswrapper[4576]: I1203 08:46:37.711367 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-2ngh2" podUID="701bd1ac-86c3-48f7-b195-490b5e187f00" containerName="registry-server" containerID="cri-o://dc835fbf2a0ad9458c6e49d394afc9dd83042c17eee9a776e680d302f1b4eb78" gracePeriod=30 Dec 03 08:46:37 crc kubenswrapper[4576]: I1203 08:46:37.723271 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-jf9sv"] Dec 03 08:46:37 crc kubenswrapper[4576]: I1203 08:46:37.724310 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-jf9sv" Dec 03 08:46:37 crc kubenswrapper[4576]: I1203 08:46:37.742325 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-jf9sv"] Dec 03 08:46:37 crc kubenswrapper[4576]: I1203 08:46:37.866518 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfsxt\" (UniqueName: \"kubernetes.io/projected/1d84683d-c810-4ef4-bd1c-6b17ed4c135d-kube-api-access-lfsxt\") pod \"marketplace-operator-79b997595-jf9sv\" (UID: \"1d84683d-c810-4ef4-bd1c-6b17ed4c135d\") " pod="openshift-marketplace/marketplace-operator-79b997595-jf9sv" Dec 03 08:46:37 crc kubenswrapper[4576]: I1203 08:46:37.866625 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1d84683d-c810-4ef4-bd1c-6b17ed4c135d-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-jf9sv\" (UID: \"1d84683d-c810-4ef4-bd1c-6b17ed4c135d\") " pod="openshift-marketplace/marketplace-operator-79b997595-jf9sv" Dec 03 08:46:37 crc kubenswrapper[4576]: I1203 08:46:37.866648 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/1d84683d-c810-4ef4-bd1c-6b17ed4c135d-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-jf9sv\" (UID: \"1d84683d-c810-4ef4-bd1c-6b17ed4c135d\") " pod="openshift-marketplace/marketplace-operator-79b997595-jf9sv" Dec 03 08:46:37 crc kubenswrapper[4576]: I1203 08:46:37.968274 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1d84683d-c810-4ef4-bd1c-6b17ed4c135d-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-jf9sv\" (UID: \"1d84683d-c810-4ef4-bd1c-6b17ed4c135d\") " pod="openshift-marketplace/marketplace-operator-79b997595-jf9sv" Dec 03 08:46:37 crc kubenswrapper[4576]: I1203 08:46:37.968325 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/1d84683d-c810-4ef4-bd1c-6b17ed4c135d-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-jf9sv\" (UID: \"1d84683d-c810-4ef4-bd1c-6b17ed4c135d\") " pod="openshift-marketplace/marketplace-operator-79b997595-jf9sv" Dec 03 08:46:37 crc kubenswrapper[4576]: I1203 08:46:37.968389 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfsxt\" (UniqueName: \"kubernetes.io/projected/1d84683d-c810-4ef4-bd1c-6b17ed4c135d-kube-api-access-lfsxt\") pod \"marketplace-operator-79b997595-jf9sv\" (UID: \"1d84683d-c810-4ef4-bd1c-6b17ed4c135d\") " pod="openshift-marketplace/marketplace-operator-79b997595-jf9sv" Dec 03 08:46:37 crc kubenswrapper[4576]: I1203 08:46:37.969625 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1d84683d-c810-4ef4-bd1c-6b17ed4c135d-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-jf9sv\" (UID: \"1d84683d-c810-4ef4-bd1c-6b17ed4c135d\") " pod="openshift-marketplace/marketplace-operator-79b997595-jf9sv" Dec 03 08:46:37 crc kubenswrapper[4576]: I1203 08:46:37.983904 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/1d84683d-c810-4ef4-bd1c-6b17ed4c135d-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-jf9sv\" (UID: \"1d84683d-c810-4ef4-bd1c-6b17ed4c135d\") " pod="openshift-marketplace/marketplace-operator-79b997595-jf9sv" Dec 03 08:46:37 crc kubenswrapper[4576]: I1203 08:46:37.987422 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfsxt\" (UniqueName: \"kubernetes.io/projected/1d84683d-c810-4ef4-bd1c-6b17ed4c135d-kube-api-access-lfsxt\") pod \"marketplace-operator-79b997595-jf9sv\" (UID: \"1d84683d-c810-4ef4-bd1c-6b17ed4c135d\") " pod="openshift-marketplace/marketplace-operator-79b997595-jf9sv" Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.060943 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-jf9sv" Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.208594 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rxb6h" event={"ID":"711712fe-5398-42c5-bff6-d8f984c47764","Type":"ContainerDied","Data":"7b4a4265abf22e3f3d5d5f479cce79a6e4038d9d11a2b95d0b88c634b34b3126"} Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.208450 4576 generic.go:334] "Generic (PLEG): container finished" podID="711712fe-5398-42c5-bff6-d8f984c47764" containerID="7b4a4265abf22e3f3d5d5f479cce79a6e4038d9d11a2b95d0b88c634b34b3126" exitCode=0 Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.232125 4576 generic.go:334] "Generic (PLEG): container finished" podID="b040fd94-da16-41e4-8fdf-66b7a8e05d87" containerID="9b3bd9c10f35901b401f565cb3d769555bd31516c74c5ee017f3267e8b8be99a" exitCode=0 Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.232255 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gmpkg" event={"ID":"b040fd94-da16-41e4-8fdf-66b7a8e05d87","Type":"ContainerDied","Data":"9b3bd9c10f35901b401f565cb3d769555bd31516c74c5ee017f3267e8b8be99a"} Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.235864 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-mrptv" Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.241204 4576 generic.go:334] "Generic (PLEG): container finished" podID="b420867b-5c27-4b98-a2ab-0dd31175f5c0" containerID="5977173bfde796ab362a15efe5192dfc4331b1a451156464272ffe6bb921352d" exitCode=0 Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.246249 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lcs7f" event={"ID":"b420867b-5c27-4b98-a2ab-0dd31175f5c0","Type":"ContainerDied","Data":"5977173bfde796ab362a15efe5192dfc4331b1a451156464272ffe6bb921352d"} Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.258005 4576 generic.go:334] "Generic (PLEG): container finished" podID="701bd1ac-86c3-48f7-b195-490b5e187f00" containerID="dc835fbf2a0ad9458c6e49d394afc9dd83042c17eee9a776e680d302f1b4eb78" exitCode=0 Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.258296 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2ngh2" event={"ID":"701bd1ac-86c3-48f7-b195-490b5e187f00","Type":"ContainerDied","Data":"dc835fbf2a0ad9458c6e49d394afc9dd83042c17eee9a776e680d302f1b4eb78"} Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.261360 4576 generic.go:334] "Generic (PLEG): container finished" podID="2541618c-f550-48ef-9316-77a5dd7f1084" containerID="351871c68a336dcd75ae9cbe6efcaf9655728aeeef7d2a90d7a367479728e674" exitCode=0 Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.261580 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-mrptv" Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.261629 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mrptv" event={"ID":"2541618c-f550-48ef-9316-77a5dd7f1084","Type":"ContainerDied","Data":"351871c68a336dcd75ae9cbe6efcaf9655728aeeef7d2a90d7a367479728e674"} Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.262715 4576 scope.go:117] "RemoveContainer" containerID="351871c68a336dcd75ae9cbe6efcaf9655728aeeef7d2a90d7a367479728e674" Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.280208 4576 scope.go:117] "RemoveContainer" containerID="4b1273cfc28de5af495b51f60a1cc5facbdcb34b68b97741f59f227322037917" Dec 03 08:46:38 crc kubenswrapper[4576]: E1203 08:46:38.281774 4576 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 5977173bfde796ab362a15efe5192dfc4331b1a451156464272ffe6bb921352d is running failed: container process not found" containerID="5977173bfde796ab362a15efe5192dfc4331b1a451156464272ffe6bb921352d" cmd=["grpc_health_probe","-addr=:50051"] Dec 03 08:46:38 crc kubenswrapper[4576]: E1203 08:46:38.283465 4576 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 5977173bfde796ab362a15efe5192dfc4331b1a451156464272ffe6bb921352d is running failed: container process not found" containerID="5977173bfde796ab362a15efe5192dfc4331b1a451156464272ffe6bb921352d" cmd=["grpc_health_probe","-addr=:50051"] Dec 03 08:46:38 crc kubenswrapper[4576]: E1203 08:46:38.283881 4576 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 
5977173bfde796ab362a15efe5192dfc4331b1a451156464272ffe6bb921352d is running failed: container process not found" containerID="5977173bfde796ab362a15efe5192dfc4331b1a451156464272ffe6bb921352d" cmd=["grpc_health_probe","-addr=:50051"] Dec 03 08:46:38 crc kubenswrapper[4576]: E1203 08:46:38.283970 4576 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 5977173bfde796ab362a15efe5192dfc4331b1a451156464272ffe6bb921352d is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-marketplace-lcs7f" podUID="b420867b-5c27-4b98-a2ab-0dd31175f5c0" containerName="registry-server" Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.382750 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2541618c-f550-48ef-9316-77a5dd7f1084-marketplace-trusted-ca\") pod \"2541618c-f550-48ef-9316-77a5dd7f1084\" (UID: \"2541618c-f550-48ef-9316-77a5dd7f1084\") " Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.382811 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/2541618c-f550-48ef-9316-77a5dd7f1084-marketplace-operator-metrics\") pod \"2541618c-f550-48ef-9316-77a5dd7f1084\" (UID: \"2541618c-f550-48ef-9316-77a5dd7f1084\") " Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.382912 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4jm7l\" (UniqueName: \"kubernetes.io/projected/2541618c-f550-48ef-9316-77a5dd7f1084-kube-api-access-4jm7l\") pod \"2541618c-f550-48ef-9316-77a5dd7f1084\" (UID: \"2541618c-f550-48ef-9316-77a5dd7f1084\") " Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.385929 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2541618c-f550-48ef-9316-77a5dd7f1084-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "2541618c-f550-48ef-9316-77a5dd7f1084" (UID: "2541618c-f550-48ef-9316-77a5dd7f1084"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.390217 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2541618c-f550-48ef-9316-77a5dd7f1084-kube-api-access-4jm7l" (OuterVolumeSpecName: "kube-api-access-4jm7l") pod "2541618c-f550-48ef-9316-77a5dd7f1084" (UID: "2541618c-f550-48ef-9316-77a5dd7f1084"). InnerVolumeSpecName "kube-api-access-4jm7l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.395803 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2541618c-f550-48ef-9316-77a5dd7f1084-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "2541618c-f550-48ef-9316-77a5dd7f1084" (UID: "2541618c-f550-48ef-9316-77a5dd7f1084"). InnerVolumeSpecName "marketplace-operator-metrics". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.484576 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4jm7l\" (UniqueName: \"kubernetes.io/projected/2541618c-f550-48ef-9316-77a5dd7f1084-kube-api-access-4jm7l\") on node \"crc\" DevicePath \"\"" Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.484930 4576 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2541618c-f550-48ef-9316-77a5dd7f1084-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.484943 4576 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/2541618c-f550-48ef-9316-77a5dd7f1084-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.497405 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rxb6h" Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.559925 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lcs7f" Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.562260 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2ngh2" Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.625983 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gmpkg" Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.638886 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mrptv"] Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.638958 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mrptv"] Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.686074 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b420867b-5c27-4b98-a2ab-0dd31175f5c0-utilities\") pod \"b420867b-5c27-4b98-a2ab-0dd31175f5c0\" (UID: \"b420867b-5c27-4b98-a2ab-0dd31175f5c0\") " Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.686146 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fxwd6\" (UniqueName: \"kubernetes.io/projected/701bd1ac-86c3-48f7-b195-490b5e187f00-kube-api-access-fxwd6\") pod \"701bd1ac-86c3-48f7-b195-490b5e187f00\" (UID: \"701bd1ac-86c3-48f7-b195-490b5e187f00\") " Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.686181 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/701bd1ac-86c3-48f7-b195-490b5e187f00-catalog-content\") pod \"701bd1ac-86c3-48f7-b195-490b5e187f00\" (UID: \"701bd1ac-86c3-48f7-b195-490b5e187f00\") " Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.686201 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/711712fe-5398-42c5-bff6-d8f984c47764-catalog-content\") pod \"711712fe-5398-42c5-bff6-d8f984c47764\" (UID: \"711712fe-5398-42c5-bff6-d8f984c47764\") " Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 
08:46:38.686227 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/711712fe-5398-42c5-bff6-d8f984c47764-utilities\") pod \"711712fe-5398-42c5-bff6-d8f984c47764\" (UID: \"711712fe-5398-42c5-bff6-d8f984c47764\") " Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.686260 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b420867b-5c27-4b98-a2ab-0dd31175f5c0-catalog-content\") pod \"b420867b-5c27-4b98-a2ab-0dd31175f5c0\" (UID: \"b420867b-5c27-4b98-a2ab-0dd31175f5c0\") " Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.686273 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/701bd1ac-86c3-48f7-b195-490b5e187f00-utilities\") pod \"701bd1ac-86c3-48f7-b195-490b5e187f00\" (UID: \"701bd1ac-86c3-48f7-b195-490b5e187f00\") " Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.686321 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9s8vg\" (UniqueName: \"kubernetes.io/projected/711712fe-5398-42c5-bff6-d8f984c47764-kube-api-access-9s8vg\") pod \"711712fe-5398-42c5-bff6-d8f984c47764\" (UID: \"711712fe-5398-42c5-bff6-d8f984c47764\") " Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.686352 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rwj2f\" (UniqueName: \"kubernetes.io/projected/b420867b-5c27-4b98-a2ab-0dd31175f5c0-kube-api-access-rwj2f\") pod \"b420867b-5c27-4b98-a2ab-0dd31175f5c0\" (UID: \"b420867b-5c27-4b98-a2ab-0dd31175f5c0\") " Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.688087 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/701bd1ac-86c3-48f7-b195-490b5e187f00-utilities" (OuterVolumeSpecName: "utilities") pod "701bd1ac-86c3-48f7-b195-490b5e187f00" (UID: "701bd1ac-86c3-48f7-b195-490b5e187f00"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.691760 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b420867b-5c27-4b98-a2ab-0dd31175f5c0-kube-api-access-rwj2f" (OuterVolumeSpecName: "kube-api-access-rwj2f") pod "b420867b-5c27-4b98-a2ab-0dd31175f5c0" (UID: "b420867b-5c27-4b98-a2ab-0dd31175f5c0"). InnerVolumeSpecName "kube-api-access-rwj2f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.692140 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/711712fe-5398-42c5-bff6-d8f984c47764-kube-api-access-9s8vg" (OuterVolumeSpecName: "kube-api-access-9s8vg") pod "711712fe-5398-42c5-bff6-d8f984c47764" (UID: "711712fe-5398-42c5-bff6-d8f984c47764"). InnerVolumeSpecName "kube-api-access-9s8vg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.694730 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b420867b-5c27-4b98-a2ab-0dd31175f5c0-utilities" (OuterVolumeSpecName: "utilities") pod "b420867b-5c27-4b98-a2ab-0dd31175f5c0" (UID: "b420867b-5c27-4b98-a2ab-0dd31175f5c0"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.695934 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/701bd1ac-86c3-48f7-b195-490b5e187f00-kube-api-access-fxwd6" (OuterVolumeSpecName: "kube-api-access-fxwd6") pod "701bd1ac-86c3-48f7-b195-490b5e187f00" (UID: "701bd1ac-86c3-48f7-b195-490b5e187f00"). InnerVolumeSpecName "kube-api-access-fxwd6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.696955 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/711712fe-5398-42c5-bff6-d8f984c47764-utilities" (OuterVolumeSpecName: "utilities") pod "711712fe-5398-42c5-bff6-d8f984c47764" (UID: "711712fe-5398-42c5-bff6-d8f984c47764"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.708971 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b420867b-5c27-4b98-a2ab-0dd31175f5c0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b420867b-5c27-4b98-a2ab-0dd31175f5c0" (UID: "b420867b-5c27-4b98-a2ab-0dd31175f5c0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.758327 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-jf9sv"] Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.776904 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/711712fe-5398-42c5-bff6-d8f984c47764-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "711712fe-5398-42c5-bff6-d8f984c47764" (UID: "711712fe-5398-42c5-bff6-d8f984c47764"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.788761 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b040fd94-da16-41e4-8fdf-66b7a8e05d87-catalog-content\") pod \"b040fd94-da16-41e4-8fdf-66b7a8e05d87\" (UID: \"b040fd94-da16-41e4-8fdf-66b7a8e05d87\") " Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.788875 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5v5th\" (UniqueName: \"kubernetes.io/projected/b040fd94-da16-41e4-8fdf-66b7a8e05d87-kube-api-access-5v5th\") pod \"b040fd94-da16-41e4-8fdf-66b7a8e05d87\" (UID: \"b040fd94-da16-41e4-8fdf-66b7a8e05d87\") " Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.788972 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b040fd94-da16-41e4-8fdf-66b7a8e05d87-utilities\") pod \"b040fd94-da16-41e4-8fdf-66b7a8e05d87\" (UID: \"b040fd94-da16-41e4-8fdf-66b7a8e05d87\") " Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.789243 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/711712fe-5398-42c5-bff6-d8f984c47764-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.789255 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/711712fe-5398-42c5-bff6-d8f984c47764-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.789264 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b420867b-5c27-4b98-a2ab-0dd31175f5c0-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.789273 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/701bd1ac-86c3-48f7-b195-490b5e187f00-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.789282 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9s8vg\" (UniqueName: \"kubernetes.io/projected/711712fe-5398-42c5-bff6-d8f984c47764-kube-api-access-9s8vg\") on node \"crc\" DevicePath \"\"" Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.789292 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rwj2f\" (UniqueName: \"kubernetes.io/projected/b420867b-5c27-4b98-a2ab-0dd31175f5c0-kube-api-access-rwj2f\") on node \"crc\" DevicePath \"\"" Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.789351 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b420867b-5c27-4b98-a2ab-0dd31175f5c0-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.790046 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fxwd6\" (UniqueName: \"kubernetes.io/projected/701bd1ac-86c3-48f7-b195-490b5e187f00-kube-api-access-fxwd6\") on node \"crc\" DevicePath \"\"" Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.792480 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b040fd94-da16-41e4-8fdf-66b7a8e05d87-utilities" (OuterVolumeSpecName: "utilities") pod 
"b040fd94-da16-41e4-8fdf-66b7a8e05d87" (UID: "b040fd94-da16-41e4-8fdf-66b7a8e05d87"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.795319 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b040fd94-da16-41e4-8fdf-66b7a8e05d87-kube-api-access-5v5th" (OuterVolumeSpecName: "kube-api-access-5v5th") pod "b040fd94-da16-41e4-8fdf-66b7a8e05d87" (UID: "b040fd94-da16-41e4-8fdf-66b7a8e05d87"). InnerVolumeSpecName "kube-api-access-5v5th". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.847304 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/701bd1ac-86c3-48f7-b195-490b5e187f00-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "701bd1ac-86c3-48f7-b195-490b5e187f00" (UID: "701bd1ac-86c3-48f7-b195-490b5e187f00"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.852894 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b040fd94-da16-41e4-8fdf-66b7a8e05d87-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b040fd94-da16-41e4-8fdf-66b7a8e05d87" (UID: "b040fd94-da16-41e4-8fdf-66b7a8e05d87"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.891689 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b040fd94-da16-41e4-8fdf-66b7a8e05d87-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.891768 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5v5th\" (UniqueName: \"kubernetes.io/projected/b040fd94-da16-41e4-8fdf-66b7a8e05d87-kube-api-access-5v5th\") on node \"crc\" DevicePath \"\"" Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.891782 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/701bd1ac-86c3-48f7-b195-490b5e187f00-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 08:46:38 crc kubenswrapper[4576]: I1203 08:46:38.891792 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b040fd94-da16-41e4-8fdf-66b7a8e05d87-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.270895 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rxb6h" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.270924 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rxb6h" event={"ID":"711712fe-5398-42c5-bff6-d8f984c47764","Type":"ContainerDied","Data":"6273fcb9889efac38a16b6f16d51c7f365a440316c1894ad17942f3595d4fd6b"} Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.271410 4576 scope.go:117] "RemoveContainer" containerID="7b4a4265abf22e3f3d5d5f479cce79a6e4038d9d11a2b95d0b88c634b34b3126" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.272797 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-jf9sv" event={"ID":"1d84683d-c810-4ef4-bd1c-6b17ed4c135d","Type":"ContainerStarted","Data":"e591c1e4fce29e6f5433bc3f64ee9d95dea400671cf523cea7c19034dac59dff"} Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.272823 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-jf9sv" event={"ID":"1d84683d-c810-4ef4-bd1c-6b17ed4c135d","Type":"ContainerStarted","Data":"e0a08dc04ef42d40f28e95fcb0a4ee404cdf83f64bec281623f031e678fe95e6"} Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.273645 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-jf9sv" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.275805 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gmpkg" event={"ID":"b040fd94-da16-41e4-8fdf-66b7a8e05d87","Type":"ContainerDied","Data":"c0608a5294a516da817a3f1587a29083e6f231aa4391f16467fc7b9c32863ae4"} Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.275869 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gmpkg" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.279263 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lcs7f" event={"ID":"b420867b-5c27-4b98-a2ab-0dd31175f5c0","Type":"ContainerDied","Data":"ed60e516e9b3106b79aad8e7a8553d841d64085db01212e46ca27e1134ea6f35"} Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.279343 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lcs7f" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.285788 4576 scope.go:117] "RemoveContainer" containerID="c58d9bd62a324d85836a4f3c0807c553b317bf9e351551258d8bde31385860bd" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.286832 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2ngh2" event={"ID":"701bd1ac-86c3-48f7-b195-490b5e187f00","Type":"ContainerDied","Data":"8a42f3c93dab6f4025b6d4aa8c1cc5ef143652c7d2bc0645f9577cb372331976"} Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.286909 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2ngh2" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.307117 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-jf9sv" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.314761 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-jf9sv" podStartSLOduration=2.314732748 podStartE2EDuration="2.314732748s" podCreationTimestamp="2025-12-03 08:46:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:46:39.310285747 +0000 UTC m=+406.696262741" watchObservedRunningTime="2025-12-03 08:46:39.314732748 +0000 UTC m=+406.700709732" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.326252 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rxb6h"] Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.334017 4576 scope.go:117] "RemoveContainer" containerID="8ac9ac909f87dce2f70dce3d8c6fd046aa4d76aed83980890cc4041697630274" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.345753 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-rxb6h"] Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.378742 4576 scope.go:117] "RemoveContainer" containerID="9b3bd9c10f35901b401f565cb3d769555bd31516c74c5ee017f3267e8b8be99a" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.381402 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gmpkg"] Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.386216 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-gmpkg"] Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.399383 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lcs7f"] Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.407214 4576 scope.go:117] "RemoveContainer" containerID="7d0c6b32c01e007486fd10b40977174d8c807cc402a50c967b5f60814e289130" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.418038 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-lcs7f"] Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.430987 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2ngh2"] Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.436440 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-2ngh2"] Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.440966 4576 scope.go:117] "RemoveContainer" containerID="f79586934c1abd195376e844447549c7a1844f8fd29abb9857809ed353f3b1f0" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.455463 4576 scope.go:117] "RemoveContainer" containerID="5977173bfde796ab362a15efe5192dfc4331b1a451156464272ffe6bb921352d" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.474823 4576 scope.go:117] "RemoveContainer" containerID="29058e7ff8417fd1ab3e2bd682d6777a0f1cc9a72717445e00cfafb72b93ef49" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.488356 4576 scope.go:117] "RemoveContainer" containerID="a4189fe76020d6fdfdf95244ddbbc7f4284b5a5e5a7d064f5a1920529caf751b" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 
08:46:39.506212 4576 scope.go:117] "RemoveContainer" containerID="dc835fbf2a0ad9458c6e49d394afc9dd83042c17eee9a776e680d302f1b4eb78" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.517744 4576 scope.go:117] "RemoveContainer" containerID="0a0048aae2828e686a5b8c863d3981cd9bd199a1f0ffc0b239515bf6b0ccfc29" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.531781 4576 scope.go:117] "RemoveContainer" containerID="801fecd290425b9c26dc28a1146560999d522975623f6194d4d5f22a7301f2e0" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.680308 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.680368 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.686290 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2541618c-f550-48ef-9316-77a5dd7f1084" path="/var/lib/kubelet/pods/2541618c-f550-48ef-9316-77a5dd7f1084/volumes" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.686794 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="701bd1ac-86c3-48f7-b195-490b5e187f00" path="/var/lib/kubelet/pods/701bd1ac-86c3-48f7-b195-490b5e187f00/volumes" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.687349 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="711712fe-5398-42c5-bff6-d8f984c47764" path="/var/lib/kubelet/pods/711712fe-5398-42c5-bff6-d8f984c47764/volumes" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.688374 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b040fd94-da16-41e4-8fdf-66b7a8e05d87" path="/var/lib/kubelet/pods/b040fd94-da16-41e4-8fdf-66b7a8e05d87/volumes" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.688974 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b420867b-5c27-4b98-a2ab-0dd31175f5c0" path="/var/lib/kubelet/pods/b420867b-5c27-4b98-a2ab-0dd31175f5c0/volumes" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.885412 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-nwkqd"] Dec 03 08:46:39 crc kubenswrapper[4576]: E1203 08:46:39.886329 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2541618c-f550-48ef-9316-77a5dd7f1084" containerName="marketplace-operator" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.886356 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="2541618c-f550-48ef-9316-77a5dd7f1084" containerName="marketplace-operator" Dec 03 08:46:39 crc kubenswrapper[4576]: E1203 08:46:39.886379 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b040fd94-da16-41e4-8fdf-66b7a8e05d87" containerName="extract-content" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.886392 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="b040fd94-da16-41e4-8fdf-66b7a8e05d87" containerName="extract-content" Dec 03 08:46:39 crc kubenswrapper[4576]: E1203 08:46:39.886408 4576 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="2541618c-f550-48ef-9316-77a5dd7f1084" containerName="marketplace-operator" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.886442 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="2541618c-f550-48ef-9316-77a5dd7f1084" containerName="marketplace-operator" Dec 03 08:46:39 crc kubenswrapper[4576]: E1203 08:46:39.886458 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b040fd94-da16-41e4-8fdf-66b7a8e05d87" containerName="extract-utilities" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.886470 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="b040fd94-da16-41e4-8fdf-66b7a8e05d87" containerName="extract-utilities" Dec 03 08:46:39 crc kubenswrapper[4576]: E1203 08:46:39.886485 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="701bd1ac-86c3-48f7-b195-490b5e187f00" containerName="extract-utilities" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.886499 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="701bd1ac-86c3-48f7-b195-490b5e187f00" containerName="extract-utilities" Dec 03 08:46:39 crc kubenswrapper[4576]: E1203 08:46:39.886558 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b040fd94-da16-41e4-8fdf-66b7a8e05d87" containerName="registry-server" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.886571 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="b040fd94-da16-41e4-8fdf-66b7a8e05d87" containerName="registry-server" Dec 03 08:46:39 crc kubenswrapper[4576]: E1203 08:46:39.886594 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="701bd1ac-86c3-48f7-b195-490b5e187f00" containerName="extract-content" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.886606 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="701bd1ac-86c3-48f7-b195-490b5e187f00" containerName="extract-content" Dec 03 08:46:39 crc kubenswrapper[4576]: E1203 08:46:39.886627 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b420867b-5c27-4b98-a2ab-0dd31175f5c0" containerName="extract-content" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.886639 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="b420867b-5c27-4b98-a2ab-0dd31175f5c0" containerName="extract-content" Dec 03 08:46:39 crc kubenswrapper[4576]: E1203 08:46:39.886654 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b420867b-5c27-4b98-a2ab-0dd31175f5c0" containerName="extract-utilities" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.886668 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="b420867b-5c27-4b98-a2ab-0dd31175f5c0" containerName="extract-utilities" Dec 03 08:46:39 crc kubenswrapper[4576]: E1203 08:46:39.886686 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="711712fe-5398-42c5-bff6-d8f984c47764" containerName="extract-content" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.886698 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="711712fe-5398-42c5-bff6-d8f984c47764" containerName="extract-content" Dec 03 08:46:39 crc kubenswrapper[4576]: E1203 08:46:39.886719 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b420867b-5c27-4b98-a2ab-0dd31175f5c0" containerName="registry-server" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.886732 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="b420867b-5c27-4b98-a2ab-0dd31175f5c0" containerName="registry-server" Dec 03 08:46:39 crc kubenswrapper[4576]: E1203 
08:46:39.886751 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="711712fe-5398-42c5-bff6-d8f984c47764" containerName="registry-server" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.886762 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="711712fe-5398-42c5-bff6-d8f984c47764" containerName="registry-server" Dec 03 08:46:39 crc kubenswrapper[4576]: E1203 08:46:39.886779 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="711712fe-5398-42c5-bff6-d8f984c47764" containerName="extract-utilities" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.886791 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="711712fe-5398-42c5-bff6-d8f984c47764" containerName="extract-utilities" Dec 03 08:46:39 crc kubenswrapper[4576]: E1203 08:46:39.886808 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="701bd1ac-86c3-48f7-b195-490b5e187f00" containerName="registry-server" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.886820 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="701bd1ac-86c3-48f7-b195-490b5e187f00" containerName="registry-server" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.886978 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="2541618c-f550-48ef-9316-77a5dd7f1084" containerName="marketplace-operator" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.886997 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="701bd1ac-86c3-48f7-b195-490b5e187f00" containerName="registry-server" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.887013 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="2541618c-f550-48ef-9316-77a5dd7f1084" containerName="marketplace-operator" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.887026 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="711712fe-5398-42c5-bff6-d8f984c47764" containerName="registry-server" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.887045 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="b040fd94-da16-41e4-8fdf-66b7a8e05d87" containerName="registry-server" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.887065 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="b420867b-5c27-4b98-a2ab-0dd31175f5c0" containerName="registry-server" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.888426 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nwkqd" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.891313 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 03 08:46:39 crc kubenswrapper[4576]: I1203 08:46:39.899870 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nwkqd"] Dec 03 08:46:40 crc kubenswrapper[4576]: I1203 08:46:40.014591 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2tdpc\" (UniqueName: \"kubernetes.io/projected/c78abb51-1399-44a2-8de8-16b060a40d50-kube-api-access-2tdpc\") pod \"redhat-marketplace-nwkqd\" (UID: \"c78abb51-1399-44a2-8de8-16b060a40d50\") " pod="openshift-marketplace/redhat-marketplace-nwkqd" Dec 03 08:46:40 crc kubenswrapper[4576]: I1203 08:46:40.014991 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c78abb51-1399-44a2-8de8-16b060a40d50-catalog-content\") pod \"redhat-marketplace-nwkqd\" (UID: \"c78abb51-1399-44a2-8de8-16b060a40d50\") " pod="openshift-marketplace/redhat-marketplace-nwkqd" Dec 03 08:46:40 crc kubenswrapper[4576]: I1203 08:46:40.015213 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c78abb51-1399-44a2-8de8-16b060a40d50-utilities\") pod \"redhat-marketplace-nwkqd\" (UID: \"c78abb51-1399-44a2-8de8-16b060a40d50\") " pod="openshift-marketplace/redhat-marketplace-nwkqd" Dec 03 08:46:40 crc kubenswrapper[4576]: I1203 08:46:40.076228 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-6pv5k"] Dec 03 08:46:40 crc kubenswrapper[4576]: I1203 08:46:40.079515 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6pv5k" Dec 03 08:46:40 crc kubenswrapper[4576]: I1203 08:46:40.082871 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 03 08:46:40 crc kubenswrapper[4576]: I1203 08:46:40.091350 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6pv5k"] Dec 03 08:46:40 crc kubenswrapper[4576]: I1203 08:46:40.117480 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2tdpc\" (UniqueName: \"kubernetes.io/projected/c78abb51-1399-44a2-8de8-16b060a40d50-kube-api-access-2tdpc\") pod \"redhat-marketplace-nwkqd\" (UID: \"c78abb51-1399-44a2-8de8-16b060a40d50\") " pod="openshift-marketplace/redhat-marketplace-nwkqd" Dec 03 08:46:40 crc kubenswrapper[4576]: I1203 08:46:40.117596 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c78abb51-1399-44a2-8de8-16b060a40d50-catalog-content\") pod \"redhat-marketplace-nwkqd\" (UID: \"c78abb51-1399-44a2-8de8-16b060a40d50\") " pod="openshift-marketplace/redhat-marketplace-nwkqd" Dec 03 08:46:40 crc kubenswrapper[4576]: I1203 08:46:40.117671 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c78abb51-1399-44a2-8de8-16b060a40d50-utilities\") pod \"redhat-marketplace-nwkqd\" (UID: \"c78abb51-1399-44a2-8de8-16b060a40d50\") " pod="openshift-marketplace/redhat-marketplace-nwkqd" Dec 03 08:46:40 crc kubenswrapper[4576]: I1203 08:46:40.118204 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c78abb51-1399-44a2-8de8-16b060a40d50-catalog-content\") pod \"redhat-marketplace-nwkqd\" (UID: \"c78abb51-1399-44a2-8de8-16b060a40d50\") " pod="openshift-marketplace/redhat-marketplace-nwkqd" Dec 03 08:46:40 crc kubenswrapper[4576]: I1203 08:46:40.118771 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c78abb51-1399-44a2-8de8-16b060a40d50-utilities\") pod \"redhat-marketplace-nwkqd\" (UID: \"c78abb51-1399-44a2-8de8-16b060a40d50\") " pod="openshift-marketplace/redhat-marketplace-nwkqd" Dec 03 08:46:40 crc kubenswrapper[4576]: I1203 08:46:40.135515 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2tdpc\" (UniqueName: \"kubernetes.io/projected/c78abb51-1399-44a2-8de8-16b060a40d50-kube-api-access-2tdpc\") pod \"redhat-marketplace-nwkqd\" (UID: \"c78abb51-1399-44a2-8de8-16b060a40d50\") " pod="openshift-marketplace/redhat-marketplace-nwkqd" Dec 03 08:46:40 crc kubenswrapper[4576]: I1203 08:46:40.208851 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nwkqd" Dec 03 08:46:40 crc kubenswrapper[4576]: I1203 08:46:40.219105 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7-utilities\") pod \"redhat-operators-6pv5k\" (UID: \"774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7\") " pod="openshift-marketplace/redhat-operators-6pv5k" Dec 03 08:46:40 crc kubenswrapper[4576]: I1203 08:46:40.219371 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jr7cl\" (UniqueName: \"kubernetes.io/projected/774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7-kube-api-access-jr7cl\") pod \"redhat-operators-6pv5k\" (UID: \"774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7\") " pod="openshift-marketplace/redhat-operators-6pv5k" Dec 03 08:46:40 crc kubenswrapper[4576]: I1203 08:46:40.219501 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7-catalog-content\") pod \"redhat-operators-6pv5k\" (UID: \"774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7\") " pod="openshift-marketplace/redhat-operators-6pv5k" Dec 03 08:46:40 crc kubenswrapper[4576]: I1203 08:46:40.321642 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7-utilities\") pod \"redhat-operators-6pv5k\" (UID: \"774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7\") " pod="openshift-marketplace/redhat-operators-6pv5k" Dec 03 08:46:40 crc kubenswrapper[4576]: I1203 08:46:40.322037 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jr7cl\" (UniqueName: \"kubernetes.io/projected/774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7-kube-api-access-jr7cl\") pod \"redhat-operators-6pv5k\" (UID: \"774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7\") " pod="openshift-marketplace/redhat-operators-6pv5k" Dec 03 08:46:40 crc kubenswrapper[4576]: I1203 08:46:40.322105 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7-catalog-content\") pod \"redhat-operators-6pv5k\" (UID: \"774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7\") " pod="openshift-marketplace/redhat-operators-6pv5k" Dec 03 08:46:40 crc kubenswrapper[4576]: I1203 08:46:40.322341 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7-utilities\") pod \"redhat-operators-6pv5k\" (UID: \"774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7\") " pod="openshift-marketplace/redhat-operators-6pv5k" Dec 03 08:46:40 crc kubenswrapper[4576]: I1203 08:46:40.322623 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7-catalog-content\") pod \"redhat-operators-6pv5k\" (UID: \"774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7\") " pod="openshift-marketplace/redhat-operators-6pv5k" Dec 03 08:46:40 crc kubenswrapper[4576]: I1203 08:46:40.345813 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jr7cl\" (UniqueName: \"kubernetes.io/projected/774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7-kube-api-access-jr7cl\") pod \"redhat-operators-6pv5k\" (UID: 
\"774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7\") " pod="openshift-marketplace/redhat-operators-6pv5k" Dec 03 08:46:40 crc kubenswrapper[4576]: I1203 08:46:40.402404 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6pv5k" Dec 03 08:46:40 crc kubenswrapper[4576]: I1203 08:46:40.603691 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nwkqd"] Dec 03 08:46:40 crc kubenswrapper[4576]: W1203 08:46:40.608376 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc78abb51_1399_44a2_8de8_16b060a40d50.slice/crio-a8f3592dfbfdccefbb3e4090aa5a2db8df75f127c3a86d0a0b817b6a9e2fb0f4 WatchSource:0}: Error finding container a8f3592dfbfdccefbb3e4090aa5a2db8df75f127c3a86d0a0b817b6a9e2fb0f4: Status 404 returned error can't find the container with id a8f3592dfbfdccefbb3e4090aa5a2db8df75f127c3a86d0a0b817b6a9e2fb0f4 Dec 03 08:46:40 crc kubenswrapper[4576]: I1203 08:46:40.839846 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6pv5k"] Dec 03 08:46:40 crc kubenswrapper[4576]: W1203 08:46:40.849253 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod774a50aa_bb45_4e93_aab4_c0bc7ba9e1e7.slice/crio-a16d1761b287055e978450142148ed4df4fca9cf8cca5aefc83258a97efdfbd7 WatchSource:0}: Error finding container a16d1761b287055e978450142148ed4df4fca9cf8cca5aefc83258a97efdfbd7: Status 404 returned error can't find the container with id a16d1761b287055e978450142148ed4df4fca9cf8cca5aefc83258a97efdfbd7 Dec 03 08:46:41 crc kubenswrapper[4576]: I1203 08:46:41.338568 4576 generic.go:334] "Generic (PLEG): container finished" podID="774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7" containerID="435d5834c25e1ddb36cece176a4b48ee166e9019c9b520553e193f7a98661459" exitCode=0 Dec 03 08:46:41 crc kubenswrapper[4576]: I1203 08:46:41.338629 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6pv5k" event={"ID":"774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7","Type":"ContainerDied","Data":"435d5834c25e1ddb36cece176a4b48ee166e9019c9b520553e193f7a98661459"} Dec 03 08:46:41 crc kubenswrapper[4576]: I1203 08:46:41.338893 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6pv5k" event={"ID":"774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7","Type":"ContainerStarted","Data":"a16d1761b287055e978450142148ed4df4fca9cf8cca5aefc83258a97efdfbd7"} Dec 03 08:46:41 crc kubenswrapper[4576]: I1203 08:46:41.342334 4576 generic.go:334] "Generic (PLEG): container finished" podID="c78abb51-1399-44a2-8de8-16b060a40d50" containerID="bb91a0c626ba53e0fa3e91dc15c33ce5dd054e053d393766d97cf5565bebbd7f" exitCode=0 Dec 03 08:46:41 crc kubenswrapper[4576]: I1203 08:46:41.343033 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nwkqd" event={"ID":"c78abb51-1399-44a2-8de8-16b060a40d50","Type":"ContainerDied","Data":"bb91a0c626ba53e0fa3e91dc15c33ce5dd054e053d393766d97cf5565bebbd7f"} Dec 03 08:46:41 crc kubenswrapper[4576]: I1203 08:46:41.343099 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nwkqd" event={"ID":"c78abb51-1399-44a2-8de8-16b060a40d50","Type":"ContainerStarted","Data":"a8f3592dfbfdccefbb3e4090aa5a2db8df75f127c3a86d0a0b817b6a9e2fb0f4"} Dec 03 08:46:42 crc kubenswrapper[4576]: I1203 
08:46:42.277666 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-pxhvn"] Dec 03 08:46:42 crc kubenswrapper[4576]: I1203 08:46:42.278648 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pxhvn" Dec 03 08:46:42 crc kubenswrapper[4576]: I1203 08:46:42.281318 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 03 08:46:42 crc kubenswrapper[4576]: I1203 08:46:42.295805 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pxhvn"] Dec 03 08:46:42 crc kubenswrapper[4576]: I1203 08:46:42.349756 4576 generic.go:334] "Generic (PLEG): container finished" podID="c78abb51-1399-44a2-8de8-16b060a40d50" containerID="daf931c403e3b5ebd9bf07d4170930f8e4bc89abc2f76fb8cbc99f2ecf43b3a5" exitCode=0 Dec 03 08:46:42 crc kubenswrapper[4576]: I1203 08:46:42.349838 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nwkqd" event={"ID":"c78abb51-1399-44a2-8de8-16b060a40d50","Type":"ContainerDied","Data":"daf931c403e3b5ebd9bf07d4170930f8e4bc89abc2f76fb8cbc99f2ecf43b3a5"} Dec 03 08:46:42 crc kubenswrapper[4576]: I1203 08:46:42.351690 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6pv5k" event={"ID":"774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7","Type":"ContainerStarted","Data":"b0aed14ee6031bc7ae8b2c18251f5330aa9e9aff071ff02e65ee784388a29171"} Dec 03 08:46:42 crc kubenswrapper[4576]: I1203 08:46:42.447390 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74gf8\" (UniqueName: \"kubernetes.io/projected/140e6a9b-4403-44d7-a0f3-39b6a96c7cd7-kube-api-access-74gf8\") pod \"certified-operators-pxhvn\" (UID: \"140e6a9b-4403-44d7-a0f3-39b6a96c7cd7\") " pod="openshift-marketplace/certified-operators-pxhvn" Dec 03 08:46:42 crc kubenswrapper[4576]: I1203 08:46:42.447440 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/140e6a9b-4403-44d7-a0f3-39b6a96c7cd7-utilities\") pod \"certified-operators-pxhvn\" (UID: \"140e6a9b-4403-44d7-a0f3-39b6a96c7cd7\") " pod="openshift-marketplace/certified-operators-pxhvn" Dec 03 08:46:42 crc kubenswrapper[4576]: I1203 08:46:42.447476 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/140e6a9b-4403-44d7-a0f3-39b6a96c7cd7-catalog-content\") pod \"certified-operators-pxhvn\" (UID: \"140e6a9b-4403-44d7-a0f3-39b6a96c7cd7\") " pod="openshift-marketplace/certified-operators-pxhvn" Dec 03 08:46:42 crc kubenswrapper[4576]: I1203 08:46:42.482212 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-dtm22"] Dec 03 08:46:42 crc kubenswrapper[4576]: I1203 08:46:42.483818 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dtm22" Dec 03 08:46:42 crc kubenswrapper[4576]: I1203 08:46:42.486897 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 03 08:46:42 crc kubenswrapper[4576]: I1203 08:46:42.493376 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dtm22"] Dec 03 08:46:42 crc kubenswrapper[4576]: I1203 08:46:42.548940 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/140e6a9b-4403-44d7-a0f3-39b6a96c7cd7-catalog-content\") pod \"certified-operators-pxhvn\" (UID: \"140e6a9b-4403-44d7-a0f3-39b6a96c7cd7\") " pod="openshift-marketplace/certified-operators-pxhvn" Dec 03 08:46:42 crc kubenswrapper[4576]: I1203 08:46:42.549043 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74gf8\" (UniqueName: \"kubernetes.io/projected/140e6a9b-4403-44d7-a0f3-39b6a96c7cd7-kube-api-access-74gf8\") pod \"certified-operators-pxhvn\" (UID: \"140e6a9b-4403-44d7-a0f3-39b6a96c7cd7\") " pod="openshift-marketplace/certified-operators-pxhvn" Dec 03 08:46:42 crc kubenswrapper[4576]: I1203 08:46:42.549065 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/140e6a9b-4403-44d7-a0f3-39b6a96c7cd7-utilities\") pod \"certified-operators-pxhvn\" (UID: \"140e6a9b-4403-44d7-a0f3-39b6a96c7cd7\") " pod="openshift-marketplace/certified-operators-pxhvn" Dec 03 08:46:42 crc kubenswrapper[4576]: I1203 08:46:42.549641 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/140e6a9b-4403-44d7-a0f3-39b6a96c7cd7-catalog-content\") pod \"certified-operators-pxhvn\" (UID: \"140e6a9b-4403-44d7-a0f3-39b6a96c7cd7\") " pod="openshift-marketplace/certified-operators-pxhvn" Dec 03 08:46:42 crc kubenswrapper[4576]: I1203 08:46:42.549648 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/140e6a9b-4403-44d7-a0f3-39b6a96c7cd7-utilities\") pod \"certified-operators-pxhvn\" (UID: \"140e6a9b-4403-44d7-a0f3-39b6a96c7cd7\") " pod="openshift-marketplace/certified-operators-pxhvn" Dec 03 08:46:42 crc kubenswrapper[4576]: I1203 08:46:42.569929 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-74gf8\" (UniqueName: \"kubernetes.io/projected/140e6a9b-4403-44d7-a0f3-39b6a96c7cd7-kube-api-access-74gf8\") pod \"certified-operators-pxhvn\" (UID: \"140e6a9b-4403-44d7-a0f3-39b6a96c7cd7\") " pod="openshift-marketplace/certified-operators-pxhvn" Dec 03 08:46:42 crc kubenswrapper[4576]: I1203 08:46:42.596224 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-pxhvn" Dec 03 08:46:42 crc kubenswrapper[4576]: I1203 08:46:42.650315 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/76c222ad-bbdf-40e2-ba6a-d30820ca0a74-utilities\") pod \"community-operators-dtm22\" (UID: \"76c222ad-bbdf-40e2-ba6a-d30820ca0a74\") " pod="openshift-marketplace/community-operators-dtm22" Dec 03 08:46:42 crc kubenswrapper[4576]: I1203 08:46:42.650408 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-prdzb\" (UniqueName: \"kubernetes.io/projected/76c222ad-bbdf-40e2-ba6a-d30820ca0a74-kube-api-access-prdzb\") pod \"community-operators-dtm22\" (UID: \"76c222ad-bbdf-40e2-ba6a-d30820ca0a74\") " pod="openshift-marketplace/community-operators-dtm22" Dec 03 08:46:42 crc kubenswrapper[4576]: I1203 08:46:42.650434 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/76c222ad-bbdf-40e2-ba6a-d30820ca0a74-catalog-content\") pod \"community-operators-dtm22\" (UID: \"76c222ad-bbdf-40e2-ba6a-d30820ca0a74\") " pod="openshift-marketplace/community-operators-dtm22" Dec 03 08:46:42 crc kubenswrapper[4576]: I1203 08:46:42.756450 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-prdzb\" (UniqueName: \"kubernetes.io/projected/76c222ad-bbdf-40e2-ba6a-d30820ca0a74-kube-api-access-prdzb\") pod \"community-operators-dtm22\" (UID: \"76c222ad-bbdf-40e2-ba6a-d30820ca0a74\") " pod="openshift-marketplace/community-operators-dtm22" Dec 03 08:46:42 crc kubenswrapper[4576]: I1203 08:46:42.756972 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/76c222ad-bbdf-40e2-ba6a-d30820ca0a74-catalog-content\") pod \"community-operators-dtm22\" (UID: \"76c222ad-bbdf-40e2-ba6a-d30820ca0a74\") " pod="openshift-marketplace/community-operators-dtm22" Dec 03 08:46:42 crc kubenswrapper[4576]: I1203 08:46:42.758586 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/76c222ad-bbdf-40e2-ba6a-d30820ca0a74-catalog-content\") pod \"community-operators-dtm22\" (UID: \"76c222ad-bbdf-40e2-ba6a-d30820ca0a74\") " pod="openshift-marketplace/community-operators-dtm22" Dec 03 08:46:42 crc kubenswrapper[4576]: I1203 08:46:42.758760 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/76c222ad-bbdf-40e2-ba6a-d30820ca0a74-utilities\") pod \"community-operators-dtm22\" (UID: \"76c222ad-bbdf-40e2-ba6a-d30820ca0a74\") " pod="openshift-marketplace/community-operators-dtm22" Dec 03 08:46:42 crc kubenswrapper[4576]: I1203 08:46:42.759363 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/76c222ad-bbdf-40e2-ba6a-d30820ca0a74-utilities\") pod \"community-operators-dtm22\" (UID: \"76c222ad-bbdf-40e2-ba6a-d30820ca0a74\") " pod="openshift-marketplace/community-operators-dtm22" Dec 03 08:46:42 crc kubenswrapper[4576]: I1203 08:46:42.778302 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-prdzb\" (UniqueName: \"kubernetes.io/projected/76c222ad-bbdf-40e2-ba6a-d30820ca0a74-kube-api-access-prdzb\") pod 
\"community-operators-dtm22\" (UID: \"76c222ad-bbdf-40e2-ba6a-d30820ca0a74\") " pod="openshift-marketplace/community-operators-dtm22" Dec 03 08:46:42 crc kubenswrapper[4576]: I1203 08:46:42.799408 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dtm22" Dec 03 08:46:43 crc kubenswrapper[4576]: I1203 08:46:43.009285 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pxhvn"] Dec 03 08:46:43 crc kubenswrapper[4576]: W1203 08:46:43.016931 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod140e6a9b_4403_44d7_a0f3_39b6a96c7cd7.slice/crio-2296d703cbcac54bc6a5efe2b3613969dce6cc0eb138527da7ec06527bf8ae1d WatchSource:0}: Error finding container 2296d703cbcac54bc6a5efe2b3613969dce6cc0eb138527da7ec06527bf8ae1d: Status 404 returned error can't find the container with id 2296d703cbcac54bc6a5efe2b3613969dce6cc0eb138527da7ec06527bf8ae1d Dec 03 08:46:43 crc kubenswrapper[4576]: I1203 08:46:43.238768 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dtm22"] Dec 03 08:46:43 crc kubenswrapper[4576]: I1203 08:46:43.358486 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nwkqd" event={"ID":"c78abb51-1399-44a2-8de8-16b060a40d50","Type":"ContainerStarted","Data":"c22378675cd8b7c658cd47a87fda63ca96313f86ed62dc8d3df2d199b006997b"} Dec 03 08:46:43 crc kubenswrapper[4576]: I1203 08:46:43.360610 4576 generic.go:334] "Generic (PLEG): container finished" podID="140e6a9b-4403-44d7-a0f3-39b6a96c7cd7" containerID="725c82ae70a5f390f5f8934925fd9243748e0ecdbe6dbde1148e72a121528b18" exitCode=0 Dec 03 08:46:43 crc kubenswrapper[4576]: I1203 08:46:43.360680 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pxhvn" event={"ID":"140e6a9b-4403-44d7-a0f3-39b6a96c7cd7","Type":"ContainerDied","Data":"725c82ae70a5f390f5f8934925fd9243748e0ecdbe6dbde1148e72a121528b18"} Dec 03 08:46:43 crc kubenswrapper[4576]: I1203 08:46:43.360697 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pxhvn" event={"ID":"140e6a9b-4403-44d7-a0f3-39b6a96c7cd7","Type":"ContainerStarted","Data":"2296d703cbcac54bc6a5efe2b3613969dce6cc0eb138527da7ec06527bf8ae1d"} Dec 03 08:46:43 crc kubenswrapper[4576]: I1203 08:46:43.364669 4576 generic.go:334] "Generic (PLEG): container finished" podID="774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7" containerID="b0aed14ee6031bc7ae8b2c18251f5330aa9e9aff071ff02e65ee784388a29171" exitCode=0 Dec 03 08:46:43 crc kubenswrapper[4576]: I1203 08:46:43.364745 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6pv5k" event={"ID":"774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7","Type":"ContainerDied","Data":"b0aed14ee6031bc7ae8b2c18251f5330aa9e9aff071ff02e65ee784388a29171"} Dec 03 08:46:43 crc kubenswrapper[4576]: I1203 08:46:43.366746 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dtm22" event={"ID":"76c222ad-bbdf-40e2-ba6a-d30820ca0a74","Type":"ContainerStarted","Data":"b1385aba1351901b0011ecb05542b6d187939eed240bf2ef40c15f9ed3b3a78c"} Dec 03 08:46:43 crc kubenswrapper[4576]: I1203 08:46:43.408515 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-nwkqd" podStartSLOduration=2.893427238 
podStartE2EDuration="4.408492485s" podCreationTimestamp="2025-12-03 08:46:39 +0000 UTC" firstStartedPulling="2025-12-03 08:46:41.344160075 +0000 UTC m=+408.730137099" lastFinishedPulling="2025-12-03 08:46:42.859225362 +0000 UTC m=+410.245202346" observedRunningTime="2025-12-03 08:46:43.386914655 +0000 UTC m=+410.772891639" watchObservedRunningTime="2025-12-03 08:46:43.408492485 +0000 UTC m=+410.794469469" Dec 03 08:46:44 crc kubenswrapper[4576]: I1203 08:46:44.372774 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pxhvn" event={"ID":"140e6a9b-4403-44d7-a0f3-39b6a96c7cd7","Type":"ContainerStarted","Data":"22d441599cf836f8ccb30eb05d3e0aaf5536209748c1b84903b6ad3cb823551b"} Dec 03 08:46:44 crc kubenswrapper[4576]: I1203 08:46:44.376274 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6pv5k" event={"ID":"774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7","Type":"ContainerStarted","Data":"f6635674ba75a2e2f5dbb475f9cf7902e8b1ab78f58eeae704e0397bcea53dfb"} Dec 03 08:46:44 crc kubenswrapper[4576]: I1203 08:46:44.377967 4576 generic.go:334] "Generic (PLEG): container finished" podID="76c222ad-bbdf-40e2-ba6a-d30820ca0a74" containerID="a455414c142f60f11dc5bc5d141748620e3d62d96b2ba2d21424bbeddd072002" exitCode=0 Dec 03 08:46:44 crc kubenswrapper[4576]: I1203 08:46:44.378055 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dtm22" event={"ID":"76c222ad-bbdf-40e2-ba6a-d30820ca0a74","Type":"ContainerDied","Data":"a455414c142f60f11dc5bc5d141748620e3d62d96b2ba2d21424bbeddd072002"} Dec 03 08:46:44 crc kubenswrapper[4576]: I1203 08:46:44.443619 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-6pv5k" podStartSLOduration=1.870063118 podStartE2EDuration="4.443596126s" podCreationTimestamp="2025-12-03 08:46:40 +0000 UTC" firstStartedPulling="2025-12-03 08:46:41.340383641 +0000 UTC m=+408.726360665" lastFinishedPulling="2025-12-03 08:46:43.913916689 +0000 UTC m=+411.299893673" observedRunningTime="2025-12-03 08:46:44.438765554 +0000 UTC m=+411.824742538" watchObservedRunningTime="2025-12-03 08:46:44.443596126 +0000 UTC m=+411.829573110" Dec 03 08:46:45 crc kubenswrapper[4576]: I1203 08:46:45.387040 4576 generic.go:334] "Generic (PLEG): container finished" podID="76c222ad-bbdf-40e2-ba6a-d30820ca0a74" containerID="fab57c53fe3cb29fcb345a22ba620afb1fb312bddf1df64fb0f948a2b4dbf262" exitCode=0 Dec 03 08:46:45 crc kubenswrapper[4576]: I1203 08:46:45.387130 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dtm22" event={"ID":"76c222ad-bbdf-40e2-ba6a-d30820ca0a74","Type":"ContainerDied","Data":"fab57c53fe3cb29fcb345a22ba620afb1fb312bddf1df64fb0f948a2b4dbf262"} Dec 03 08:46:45 crc kubenswrapper[4576]: I1203 08:46:45.390309 4576 generic.go:334] "Generic (PLEG): container finished" podID="140e6a9b-4403-44d7-a0f3-39b6a96c7cd7" containerID="22d441599cf836f8ccb30eb05d3e0aaf5536209748c1b84903b6ad3cb823551b" exitCode=0 Dec 03 08:46:45 crc kubenswrapper[4576]: I1203 08:46:45.390395 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pxhvn" event={"ID":"140e6a9b-4403-44d7-a0f3-39b6a96c7cd7","Type":"ContainerDied","Data":"22d441599cf836f8ccb30eb05d3e0aaf5536209748c1b84903b6ad3cb823551b"} Dec 03 08:46:46 crc kubenswrapper[4576]: I1203 08:46:46.399429 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-pxhvn" event={"ID":"140e6a9b-4403-44d7-a0f3-39b6a96c7cd7","Type":"ContainerStarted","Data":"ea7900eb5c499a7b7e9a8cb20c21167c1feddfbb14f2e471d7a14d2e6ce13541"} Dec 03 08:46:46 crc kubenswrapper[4576]: I1203 08:46:46.403244 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dtm22" event={"ID":"76c222ad-bbdf-40e2-ba6a-d30820ca0a74","Type":"ContainerStarted","Data":"3adfccc39aa89d8ebb4af533fd81cb6639ad70c9132e0d2afb8a0153b37df0ea"} Dec 03 08:46:46 crc kubenswrapper[4576]: I1203 08:46:46.433973 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-pxhvn" podStartSLOduration=1.884400722 podStartE2EDuration="4.433956163s" podCreationTimestamp="2025-12-03 08:46:42 +0000 UTC" firstStartedPulling="2025-12-03 08:46:43.362023604 +0000 UTC m=+410.748000598" lastFinishedPulling="2025-12-03 08:46:45.911579055 +0000 UTC m=+413.297556039" observedRunningTime="2025-12-03 08:46:46.431688041 +0000 UTC m=+413.817665025" watchObservedRunningTime="2025-12-03 08:46:46.433956163 +0000 UTC m=+413.819933147" Dec 03 08:46:46 crc kubenswrapper[4576]: I1203 08:46:46.459789 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-dtm22" podStartSLOduration=3.042920511 podStartE2EDuration="4.459768779s" podCreationTimestamp="2025-12-03 08:46:42 +0000 UTC" firstStartedPulling="2025-12-03 08:46:44.379667907 +0000 UTC m=+411.765644891" lastFinishedPulling="2025-12-03 08:46:45.796516175 +0000 UTC m=+413.182493159" observedRunningTime="2025-12-03 08:46:46.455769459 +0000 UTC m=+413.841746443" watchObservedRunningTime="2025-12-03 08:46:46.459768779 +0000 UTC m=+413.845745763" Dec 03 08:46:50 crc kubenswrapper[4576]: I1203 08:46:50.209814 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-nwkqd" Dec 03 08:46:50 crc kubenswrapper[4576]: I1203 08:46:50.210417 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-nwkqd" Dec 03 08:46:50 crc kubenswrapper[4576]: I1203 08:46:50.253055 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-nwkqd" Dec 03 08:46:50 crc kubenswrapper[4576]: I1203 08:46:50.403354 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-6pv5k" Dec 03 08:46:50 crc kubenswrapper[4576]: I1203 08:46:50.404134 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-6pv5k" Dec 03 08:46:50 crc kubenswrapper[4576]: I1203 08:46:50.450200 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-6pv5k" Dec 03 08:46:50 crc kubenswrapper[4576]: I1203 08:46:50.480214 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-nwkqd" Dec 03 08:46:51 crc kubenswrapper[4576]: I1203 08:46:51.519720 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-6pv5k" Dec 03 08:46:52 crc kubenswrapper[4576]: I1203 08:46:52.596592 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-pxhvn" Dec 03 08:46:52 crc kubenswrapper[4576]: I1203 08:46:52.596715 
4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-pxhvn" Dec 03 08:46:52 crc kubenswrapper[4576]: I1203 08:46:52.648820 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-pxhvn" Dec 03 08:46:52 crc kubenswrapper[4576]: I1203 08:46:52.800392 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-dtm22" Dec 03 08:46:52 crc kubenswrapper[4576]: I1203 08:46:52.800726 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-dtm22" Dec 03 08:46:52 crc kubenswrapper[4576]: I1203 08:46:52.842815 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-dtm22" Dec 03 08:46:53 crc kubenswrapper[4576]: I1203 08:46:53.525657 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-pxhvn" Dec 03 08:46:53 crc kubenswrapper[4576]: I1203 08:46:53.530312 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-dtm22" Dec 03 08:47:00 crc kubenswrapper[4576]: I1203 08:47:00.814444 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" podUID="d9c7dbff-42f0-43fe-bf81-e539cb523a77" containerName="registry" containerID="cri-o://da871a4fa637d0fc6ee6415a0e3c6f0737bd0afca404faf19d4f6c8adfcac12d" gracePeriod=30 Dec 03 08:47:02 crc kubenswrapper[4576]: I1203 08:47:02.411411 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:47:02 crc kubenswrapper[4576]: I1203 08:47:02.526820 4576 generic.go:334] "Generic (PLEG): container finished" podID="d9c7dbff-42f0-43fe-bf81-e539cb523a77" containerID="da871a4fa637d0fc6ee6415a0e3c6f0737bd0afca404faf19d4f6c8adfcac12d" exitCode=0 Dec 03 08:47:02 crc kubenswrapper[4576]: I1203 08:47:02.526855 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" event={"ID":"d9c7dbff-42f0-43fe-bf81-e539cb523a77","Type":"ContainerDied","Data":"da871a4fa637d0fc6ee6415a0e3c6f0737bd0afca404faf19d4f6c8adfcac12d"} Dec 03 08:47:02 crc kubenswrapper[4576]: I1203 08:47:02.526846 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" Dec 03 08:47:02 crc kubenswrapper[4576]: I1203 08:47:02.526898 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-hg98m" event={"ID":"d9c7dbff-42f0-43fe-bf81-e539cb523a77","Type":"ContainerDied","Data":"1aeb0e0415c9363c96efae1056c6831602c8f1fdb04af4b33dfdfc17f1399ec1"} Dec 03 08:47:02 crc kubenswrapper[4576]: I1203 08:47:02.526963 4576 scope.go:117] "RemoveContainer" containerID="da871a4fa637d0fc6ee6415a0e3c6f0737bd0afca404faf19d4f6c8adfcac12d" Dec 03 08:47:02 crc kubenswrapper[4576]: I1203 08:47:02.545909 4576 scope.go:117] "RemoveContainer" containerID="da871a4fa637d0fc6ee6415a0e3c6f0737bd0afca404faf19d4f6c8adfcac12d" Dec 03 08:47:02 crc kubenswrapper[4576]: E1203 08:47:02.546460 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"da871a4fa637d0fc6ee6415a0e3c6f0737bd0afca404faf19d4f6c8adfcac12d\": container with ID starting with da871a4fa637d0fc6ee6415a0e3c6f0737bd0afca404faf19d4f6c8adfcac12d not found: ID does not exist" containerID="da871a4fa637d0fc6ee6415a0e3c6f0737bd0afca404faf19d4f6c8adfcac12d" Dec 03 08:47:02 crc kubenswrapper[4576]: I1203 08:47:02.546510 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da871a4fa637d0fc6ee6415a0e3c6f0737bd0afca404faf19d4f6c8adfcac12d"} err="failed to get container status \"da871a4fa637d0fc6ee6415a0e3c6f0737bd0afca404faf19d4f6c8adfcac12d\": rpc error: code = NotFound desc = could not find container \"da871a4fa637d0fc6ee6415a0e3c6f0737bd0afca404faf19d4f6c8adfcac12d\": container with ID starting with da871a4fa637d0fc6ee6415a0e3c6f0737bd0afca404faf19d4f6c8adfcac12d not found: ID does not exist" Dec 03 08:47:02 crc kubenswrapper[4576]: I1203 08:47:02.552928 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d9c7dbff-42f0-43fe-bf81-e539cb523a77-trusted-ca\") pod \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " Dec 03 08:47:02 crc kubenswrapper[4576]: I1203 08:47:02.552971 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/d9c7dbff-42f0-43fe-bf81-e539cb523a77-installation-pull-secrets\") pod \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " Dec 03 08:47:02 crc kubenswrapper[4576]: I1203 08:47:02.553005 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/d9c7dbff-42f0-43fe-bf81-e539cb523a77-registry-certificates\") pod \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " Dec 03 08:47:02 crc kubenswrapper[4576]: I1203 08:47:02.553061 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/d9c7dbff-42f0-43fe-bf81-e539cb523a77-registry-tls\") pod \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " Dec 03 08:47:02 crc kubenswrapper[4576]: I1203 08:47:02.553084 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d9c7dbff-42f0-43fe-bf81-e539cb523a77-bound-sa-token\") pod 
\"d9c7dbff-42f0-43fe-bf81-e539cb523a77\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " Dec 03 08:47:02 crc kubenswrapper[4576]: I1203 08:47:02.553295 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " Dec 03 08:47:02 crc kubenswrapper[4576]: I1203 08:47:02.553318 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/d9c7dbff-42f0-43fe-bf81-e539cb523a77-ca-trust-extracted\") pod \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " Dec 03 08:47:02 crc kubenswrapper[4576]: I1203 08:47:02.553349 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f4scw\" (UniqueName: \"kubernetes.io/projected/d9c7dbff-42f0-43fe-bf81-e539cb523a77-kube-api-access-f4scw\") pod \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\" (UID: \"d9c7dbff-42f0-43fe-bf81-e539cb523a77\") " Dec 03 08:47:02 crc kubenswrapper[4576]: I1203 08:47:02.554311 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9c7dbff-42f0-43fe-bf81-e539cb523a77-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "d9c7dbff-42f0-43fe-bf81-e539cb523a77" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:47:02 crc kubenswrapper[4576]: I1203 08:47:02.554439 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9c7dbff-42f0-43fe-bf81-e539cb523a77-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "d9c7dbff-42f0-43fe-bf81-e539cb523a77" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:47:02 crc kubenswrapper[4576]: I1203 08:47:02.558322 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9c7dbff-42f0-43fe-bf81-e539cb523a77-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "d9c7dbff-42f0-43fe-bf81-e539cb523a77" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:47:02 crc kubenswrapper[4576]: I1203 08:47:02.560744 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9c7dbff-42f0-43fe-bf81-e539cb523a77-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "d9c7dbff-42f0-43fe-bf81-e539cb523a77" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:47:02 crc kubenswrapper[4576]: I1203 08:47:02.563095 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9c7dbff-42f0-43fe-bf81-e539cb523a77-kube-api-access-f4scw" (OuterVolumeSpecName: "kube-api-access-f4scw") pod "d9c7dbff-42f0-43fe-bf81-e539cb523a77" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77"). InnerVolumeSpecName "kube-api-access-f4scw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:47:02 crc kubenswrapper[4576]: I1203 08:47:02.563224 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9c7dbff-42f0-43fe-bf81-e539cb523a77-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "d9c7dbff-42f0-43fe-bf81-e539cb523a77" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:47:02 crc kubenswrapper[4576]: I1203 08:47:02.563700 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "d9c7dbff-42f0-43fe-bf81-e539cb523a77" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 03 08:47:02 crc kubenswrapper[4576]: I1203 08:47:02.584895 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d9c7dbff-42f0-43fe-bf81-e539cb523a77-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "d9c7dbff-42f0-43fe-bf81-e539cb523a77" (UID: "d9c7dbff-42f0-43fe-bf81-e539cb523a77"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:47:02 crc kubenswrapper[4576]: I1203 08:47:02.654902 4576 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/d9c7dbff-42f0-43fe-bf81-e539cb523a77-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 03 08:47:02 crc kubenswrapper[4576]: I1203 08:47:02.654941 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f4scw\" (UniqueName: \"kubernetes.io/projected/d9c7dbff-42f0-43fe-bf81-e539cb523a77-kube-api-access-f4scw\") on node \"crc\" DevicePath \"\"" Dec 03 08:47:02 crc kubenswrapper[4576]: I1203 08:47:02.654955 4576 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d9c7dbff-42f0-43fe-bf81-e539cb523a77-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 08:47:02 crc kubenswrapper[4576]: I1203 08:47:02.654963 4576 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/d9c7dbff-42f0-43fe-bf81-e539cb523a77-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 03 08:47:02 crc kubenswrapper[4576]: I1203 08:47:02.654972 4576 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/d9c7dbff-42f0-43fe-bf81-e539cb523a77-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 03 08:47:02 crc kubenswrapper[4576]: I1203 08:47:02.654980 4576 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/d9c7dbff-42f0-43fe-bf81-e539cb523a77-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 03 08:47:02 crc kubenswrapper[4576]: I1203 08:47:02.654987 4576 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d9c7dbff-42f0-43fe-bf81-e539cb523a77-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 03 08:47:02 crc kubenswrapper[4576]: I1203 08:47:02.870339 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-image-registry/image-registry-697d97f7c8-hg98m"] Dec 03 08:47:02 crc kubenswrapper[4576]: I1203 08:47:02.874229 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-hg98m"] Dec 03 08:47:03 crc kubenswrapper[4576]: I1203 08:47:03.682997 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9c7dbff-42f0-43fe-bf81-e539cb523a77" path="/var/lib/kubelet/pods/d9c7dbff-42f0-43fe-bf81-e539cb523a77/volumes" Dec 03 08:47:09 crc kubenswrapper[4576]: I1203 08:47:09.680315 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:47:09 crc kubenswrapper[4576]: I1203 08:47:09.681644 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:47:09 crc kubenswrapper[4576]: I1203 08:47:09.686408 4576 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" Dec 03 08:47:09 crc kubenswrapper[4576]: I1203 08:47:09.688984 4576 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"321c2215125142daaef8db5c8a6f580e99021024b3fde6bdf944426f9c6256a4"} pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 08:47:09 crc kubenswrapper[4576]: I1203 08:47:09.689063 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" containerID="cri-o://321c2215125142daaef8db5c8a6f580e99021024b3fde6bdf944426f9c6256a4" gracePeriod=600 Dec 03 08:47:10 crc kubenswrapper[4576]: I1203 08:47:10.577223 4576 generic.go:334] "Generic (PLEG): container finished" podID="60b1bede-26e9-4b5d-b450-9866da685693" containerID="321c2215125142daaef8db5c8a6f580e99021024b3fde6bdf944426f9c6256a4" exitCode=0 Dec 03 08:47:10 crc kubenswrapper[4576]: I1203 08:47:10.577326 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" event={"ID":"60b1bede-26e9-4b5d-b450-9866da685693","Type":"ContainerDied","Data":"321c2215125142daaef8db5c8a6f580e99021024b3fde6bdf944426f9c6256a4"} Dec 03 08:47:10 crc kubenswrapper[4576]: I1203 08:47:10.577902 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" event={"ID":"60b1bede-26e9-4b5d-b450-9866da685693","Type":"ContainerStarted","Data":"4bb3c2e5b1f7e3444ebee685683291d12fbcc3cb3482935140f572b2cc41dea7"} Dec 03 08:47:10 crc kubenswrapper[4576]: I1203 08:47:10.577933 4576 scope.go:117] "RemoveContainer" containerID="77fcbab4f8441fdeef4ff6f84326c1c5ab799faac0698120b8af9da36a524290" Dec 03 08:49:09 crc kubenswrapper[4576]: I1203 08:49:09.681092 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon 
namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:49:09 crc kubenswrapper[4576]: I1203 08:49:09.682873 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:49:39 crc kubenswrapper[4576]: I1203 08:49:39.683638 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:49:39 crc kubenswrapper[4576]: I1203 08:49:39.684379 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:50:09 crc kubenswrapper[4576]: I1203 08:50:09.680784 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:50:09 crc kubenswrapper[4576]: I1203 08:50:09.681414 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:50:09 crc kubenswrapper[4576]: I1203 08:50:09.690178 4576 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" Dec 03 08:50:09 crc kubenswrapper[4576]: I1203 08:50:09.691193 4576 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4bb3c2e5b1f7e3444ebee685683291d12fbcc3cb3482935140f572b2cc41dea7"} pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 08:50:09 crc kubenswrapper[4576]: I1203 08:50:09.691413 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" containerID="cri-o://4bb3c2e5b1f7e3444ebee685683291d12fbcc3cb3482935140f572b2cc41dea7" gracePeriod=600 Dec 03 08:50:10 crc kubenswrapper[4576]: I1203 08:50:10.828100 4576 generic.go:334] "Generic (PLEG): container finished" podID="60b1bede-26e9-4b5d-b450-9866da685693" containerID="4bb3c2e5b1f7e3444ebee685683291d12fbcc3cb3482935140f572b2cc41dea7" exitCode=0 Dec 03 08:50:10 crc kubenswrapper[4576]: I1203 08:50:10.828262 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" 
event={"ID":"60b1bede-26e9-4b5d-b450-9866da685693","Type":"ContainerDied","Data":"4bb3c2e5b1f7e3444ebee685683291d12fbcc3cb3482935140f572b2cc41dea7"} Dec 03 08:50:10 crc kubenswrapper[4576]: I1203 08:50:10.828723 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" event={"ID":"60b1bede-26e9-4b5d-b450-9866da685693","Type":"ContainerStarted","Data":"8d1bf2a31f9e2916f2f5be511327feda17a24fd71eb7831594cece38f7fce570"} Dec 03 08:50:10 crc kubenswrapper[4576]: I1203 08:50:10.828832 4576 scope.go:117] "RemoveContainer" containerID="321c2215125142daaef8db5c8a6f580e99021024b3fde6bdf944426f9c6256a4" Dec 03 08:52:39 crc kubenswrapper[4576]: I1203 08:52:39.680592 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:52:39 crc kubenswrapper[4576]: I1203 08:52:39.683061 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:52:43 crc kubenswrapper[4576]: I1203 08:52:43.590056 4576 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 03 08:53:09 crc kubenswrapper[4576]: I1203 08:53:09.681301 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:53:09 crc kubenswrapper[4576]: I1203 08:53:09.682090 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:53:39 crc kubenswrapper[4576]: I1203 08:53:39.681566 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:53:39 crc kubenswrapper[4576]: I1203 08:53:39.682293 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:53:39 crc kubenswrapper[4576]: I1203 08:53:39.686243 4576 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" Dec 03 08:53:39 crc kubenswrapper[4576]: I1203 08:53:39.687114 4576 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"8d1bf2a31f9e2916f2f5be511327feda17a24fd71eb7831594cece38f7fce570"} pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 08:53:39 crc kubenswrapper[4576]: I1203 08:53:39.687244 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" containerID="cri-o://8d1bf2a31f9e2916f2f5be511327feda17a24fd71eb7831594cece38f7fce570" gracePeriod=600 Dec 03 08:53:40 crc kubenswrapper[4576]: I1203 08:53:40.819812 4576 generic.go:334] "Generic (PLEG): container finished" podID="60b1bede-26e9-4b5d-b450-9866da685693" containerID="8d1bf2a31f9e2916f2f5be511327feda17a24fd71eb7831594cece38f7fce570" exitCode=0 Dec 03 08:53:40 crc kubenswrapper[4576]: I1203 08:53:40.819873 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" event={"ID":"60b1bede-26e9-4b5d-b450-9866da685693","Type":"ContainerDied","Data":"8d1bf2a31f9e2916f2f5be511327feda17a24fd71eb7831594cece38f7fce570"} Dec 03 08:53:40 crc kubenswrapper[4576]: I1203 08:53:40.822148 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" event={"ID":"60b1bede-26e9-4b5d-b450-9866da685693","Type":"ContainerStarted","Data":"8091374a757ed299d6fe7bbe393cc71b6502384be96d17f3905c6b2f7d07c653"} Dec 03 08:53:40 crc kubenswrapper[4576]: I1203 08:53:40.822278 4576 scope.go:117] "RemoveContainer" containerID="4bb3c2e5b1f7e3444ebee685683291d12fbcc3cb3482935140f572b2cc41dea7" Dec 03 08:54:37 crc kubenswrapper[4576]: I1203 08:54:37.162400 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-dkt4d"] Dec 03 08:54:37 crc kubenswrapper[4576]: E1203 08:54:37.163352 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9c7dbff-42f0-43fe-bf81-e539cb523a77" containerName="registry" Dec 03 08:54:37 crc kubenswrapper[4576]: I1203 08:54:37.163368 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9c7dbff-42f0-43fe-bf81-e539cb523a77" containerName="registry" Dec 03 08:54:37 crc kubenswrapper[4576]: I1203 08:54:37.163572 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9c7dbff-42f0-43fe-bf81-e539cb523a77" containerName="registry" Dec 03 08:54:37 crc kubenswrapper[4576]: I1203 08:54:37.182263 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-dkt4d" Dec 03 08:54:37 crc kubenswrapper[4576]: I1203 08:54:37.215405 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dkt4d"] Dec 03 08:54:37 crc kubenswrapper[4576]: I1203 08:54:37.327321 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f65589af-80da-4218-9330-dc5b8f2574fd-catalog-content\") pod \"certified-operators-dkt4d\" (UID: \"f65589af-80da-4218-9330-dc5b8f2574fd\") " pod="openshift-marketplace/certified-operators-dkt4d" Dec 03 08:54:37 crc kubenswrapper[4576]: I1203 08:54:37.327435 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f65589af-80da-4218-9330-dc5b8f2574fd-utilities\") pod \"certified-operators-dkt4d\" (UID: \"f65589af-80da-4218-9330-dc5b8f2574fd\") " pod="openshift-marketplace/certified-operators-dkt4d" Dec 03 08:54:37 crc kubenswrapper[4576]: I1203 08:54:37.327547 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txgt5\" (UniqueName: \"kubernetes.io/projected/f65589af-80da-4218-9330-dc5b8f2574fd-kube-api-access-txgt5\") pod \"certified-operators-dkt4d\" (UID: \"f65589af-80da-4218-9330-dc5b8f2574fd\") " pod="openshift-marketplace/certified-operators-dkt4d" Dec 03 08:54:37 crc kubenswrapper[4576]: I1203 08:54:37.428384 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f65589af-80da-4218-9330-dc5b8f2574fd-utilities\") pod \"certified-operators-dkt4d\" (UID: \"f65589af-80da-4218-9330-dc5b8f2574fd\") " pod="openshift-marketplace/certified-operators-dkt4d" Dec 03 08:54:37 crc kubenswrapper[4576]: I1203 08:54:37.428742 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txgt5\" (UniqueName: \"kubernetes.io/projected/f65589af-80da-4218-9330-dc5b8f2574fd-kube-api-access-txgt5\") pod \"certified-operators-dkt4d\" (UID: \"f65589af-80da-4218-9330-dc5b8f2574fd\") " pod="openshift-marketplace/certified-operators-dkt4d" Dec 03 08:54:37 crc kubenswrapper[4576]: I1203 08:54:37.428809 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f65589af-80da-4218-9330-dc5b8f2574fd-catalog-content\") pod \"certified-operators-dkt4d\" (UID: \"f65589af-80da-4218-9330-dc5b8f2574fd\") " pod="openshift-marketplace/certified-operators-dkt4d" Dec 03 08:54:37 crc kubenswrapper[4576]: I1203 08:54:37.429042 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f65589af-80da-4218-9330-dc5b8f2574fd-utilities\") pod \"certified-operators-dkt4d\" (UID: \"f65589af-80da-4218-9330-dc5b8f2574fd\") " pod="openshift-marketplace/certified-operators-dkt4d" Dec 03 08:54:37 crc kubenswrapper[4576]: I1203 08:54:37.429103 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f65589af-80da-4218-9330-dc5b8f2574fd-catalog-content\") pod \"certified-operators-dkt4d\" (UID: \"f65589af-80da-4218-9330-dc5b8f2574fd\") " pod="openshift-marketplace/certified-operators-dkt4d" Dec 03 08:54:37 crc kubenswrapper[4576]: I1203 08:54:37.447942 4576 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-txgt5\" (UniqueName: \"kubernetes.io/projected/f65589af-80da-4218-9330-dc5b8f2574fd-kube-api-access-txgt5\") pod \"certified-operators-dkt4d\" (UID: \"f65589af-80da-4218-9330-dc5b8f2574fd\") " pod="openshift-marketplace/certified-operators-dkt4d" Dec 03 08:54:37 crc kubenswrapper[4576]: I1203 08:54:37.523102 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dkt4d" Dec 03 08:54:37 crc kubenswrapper[4576]: I1203 08:54:37.783434 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dkt4d"] Dec 03 08:54:38 crc kubenswrapper[4576]: I1203 08:54:38.221117 4576 generic.go:334] "Generic (PLEG): container finished" podID="f65589af-80da-4218-9330-dc5b8f2574fd" containerID="51736c690042c6429c301f07fa3a81175e5b11054bbe5ce52fca09fe595084b7" exitCode=0 Dec 03 08:54:38 crc kubenswrapper[4576]: I1203 08:54:38.221174 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dkt4d" event={"ID":"f65589af-80da-4218-9330-dc5b8f2574fd","Type":"ContainerDied","Data":"51736c690042c6429c301f07fa3a81175e5b11054bbe5ce52fca09fe595084b7"} Dec 03 08:54:38 crc kubenswrapper[4576]: I1203 08:54:38.221687 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dkt4d" event={"ID":"f65589af-80da-4218-9330-dc5b8f2574fd","Type":"ContainerStarted","Data":"a2dbdc05306cb74f0235040ad888cdc230a0a00ca3d695892c004ed423f9d811"} Dec 03 08:54:38 crc kubenswrapper[4576]: I1203 08:54:38.223686 4576 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 08:54:40 crc kubenswrapper[4576]: I1203 08:54:40.245399 4576 generic.go:334] "Generic (PLEG): container finished" podID="f65589af-80da-4218-9330-dc5b8f2574fd" containerID="308f3fb236f6ecac6977d7cc55ccaec393a87714bd32f3f3a28f8eeb265e856c" exitCode=0 Dec 03 08:54:40 crc kubenswrapper[4576]: I1203 08:54:40.261648 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dkt4d" event={"ID":"f65589af-80da-4218-9330-dc5b8f2574fd","Type":"ContainerDied","Data":"308f3fb236f6ecac6977d7cc55ccaec393a87714bd32f3f3a28f8eeb265e856c"} Dec 03 08:54:41 crc kubenswrapper[4576]: I1203 08:54:41.272914 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dkt4d" event={"ID":"f65589af-80da-4218-9330-dc5b8f2574fd","Type":"ContainerStarted","Data":"356edcdbf1b24a6b73197fdeb12595e03eab334d52d98045aa5a1486631a0863"} Dec 03 08:54:41 crc kubenswrapper[4576]: I1203 08:54:41.290389 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-dkt4d" podStartSLOduration=1.7443035070000001 podStartE2EDuration="4.290343164s" podCreationTimestamp="2025-12-03 08:54:37 +0000 UTC" firstStartedPulling="2025-12-03 08:54:38.223223821 +0000 UTC m=+885.609200815" lastFinishedPulling="2025-12-03 08:54:40.769263458 +0000 UTC m=+888.155240472" observedRunningTime="2025-12-03 08:54:41.289636284 +0000 UTC m=+888.675613308" watchObservedRunningTime="2025-12-03 08:54:41.290343164 +0000 UTC m=+888.676320148" Dec 03 08:54:47 crc kubenswrapper[4576]: I1203 08:54:47.523984 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-dkt4d" Dec 03 08:54:47 crc kubenswrapper[4576]: I1203 08:54:47.524563 4576 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-dkt4d" Dec 03 08:54:47 crc kubenswrapper[4576]: I1203 08:54:47.584433 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-dkt4d" Dec 03 08:54:48 crc kubenswrapper[4576]: I1203 08:54:48.379792 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-dkt4d" Dec 03 08:54:48 crc kubenswrapper[4576]: I1203 08:54:48.427327 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dkt4d"] Dec 03 08:54:50 crc kubenswrapper[4576]: I1203 08:54:50.332679 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-dkt4d" podUID="f65589af-80da-4218-9330-dc5b8f2574fd" containerName="registry-server" containerID="cri-o://356edcdbf1b24a6b73197fdeb12595e03eab334d52d98045aa5a1486631a0863" gracePeriod=2 Dec 03 08:54:50 crc kubenswrapper[4576]: I1203 08:54:50.644768 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dkt4d" Dec 03 08:54:50 crc kubenswrapper[4576]: I1203 08:54:50.751730 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-txgt5\" (UniqueName: \"kubernetes.io/projected/f65589af-80da-4218-9330-dc5b8f2574fd-kube-api-access-txgt5\") pod \"f65589af-80da-4218-9330-dc5b8f2574fd\" (UID: \"f65589af-80da-4218-9330-dc5b8f2574fd\") " Dec 03 08:54:50 crc kubenswrapper[4576]: I1203 08:54:50.752116 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f65589af-80da-4218-9330-dc5b8f2574fd-catalog-content\") pod \"f65589af-80da-4218-9330-dc5b8f2574fd\" (UID: \"f65589af-80da-4218-9330-dc5b8f2574fd\") " Dec 03 08:54:50 crc kubenswrapper[4576]: I1203 08:54:50.752155 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f65589af-80da-4218-9330-dc5b8f2574fd-utilities\") pod \"f65589af-80da-4218-9330-dc5b8f2574fd\" (UID: \"f65589af-80da-4218-9330-dc5b8f2574fd\") " Dec 03 08:54:50 crc kubenswrapper[4576]: I1203 08:54:50.753229 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f65589af-80da-4218-9330-dc5b8f2574fd-utilities" (OuterVolumeSpecName: "utilities") pod "f65589af-80da-4218-9330-dc5b8f2574fd" (UID: "f65589af-80da-4218-9330-dc5b8f2574fd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:54:50 crc kubenswrapper[4576]: I1203 08:54:50.757712 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f65589af-80da-4218-9330-dc5b8f2574fd-kube-api-access-txgt5" (OuterVolumeSpecName: "kube-api-access-txgt5") pod "f65589af-80da-4218-9330-dc5b8f2574fd" (UID: "f65589af-80da-4218-9330-dc5b8f2574fd"). InnerVolumeSpecName "kube-api-access-txgt5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:54:50 crc kubenswrapper[4576]: I1203 08:54:50.801180 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f65589af-80da-4218-9330-dc5b8f2574fd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f65589af-80da-4218-9330-dc5b8f2574fd" (UID: "f65589af-80da-4218-9330-dc5b8f2574fd"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:54:50 crc kubenswrapper[4576]: I1203 08:54:50.853939 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f65589af-80da-4218-9330-dc5b8f2574fd-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 08:54:50 crc kubenswrapper[4576]: I1203 08:54:50.853997 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-txgt5\" (UniqueName: \"kubernetes.io/projected/f65589af-80da-4218-9330-dc5b8f2574fd-kube-api-access-txgt5\") on node \"crc\" DevicePath \"\"" Dec 03 08:54:50 crc kubenswrapper[4576]: I1203 08:54:50.854018 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f65589af-80da-4218-9330-dc5b8f2574fd-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 08:54:51 crc kubenswrapper[4576]: I1203 08:54:51.360913 4576 generic.go:334] "Generic (PLEG): container finished" podID="f65589af-80da-4218-9330-dc5b8f2574fd" containerID="356edcdbf1b24a6b73197fdeb12595e03eab334d52d98045aa5a1486631a0863" exitCode=0 Dec 03 08:54:51 crc kubenswrapper[4576]: I1203 08:54:51.360969 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dkt4d" event={"ID":"f65589af-80da-4218-9330-dc5b8f2574fd","Type":"ContainerDied","Data":"356edcdbf1b24a6b73197fdeb12595e03eab334d52d98045aa5a1486631a0863"} Dec 03 08:54:51 crc kubenswrapper[4576]: I1203 08:54:51.361007 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dkt4d" event={"ID":"f65589af-80da-4218-9330-dc5b8f2574fd","Type":"ContainerDied","Data":"a2dbdc05306cb74f0235040ad888cdc230a0a00ca3d695892c004ed423f9d811"} Dec 03 08:54:51 crc kubenswrapper[4576]: I1203 08:54:51.361047 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-dkt4d" Dec 03 08:54:51 crc kubenswrapper[4576]: I1203 08:54:51.361053 4576 scope.go:117] "RemoveContainer" containerID="356edcdbf1b24a6b73197fdeb12595e03eab334d52d98045aa5a1486631a0863" Dec 03 08:54:51 crc kubenswrapper[4576]: I1203 08:54:51.382728 4576 scope.go:117] "RemoveContainer" containerID="308f3fb236f6ecac6977d7cc55ccaec393a87714bd32f3f3a28f8eeb265e856c" Dec 03 08:54:51 crc kubenswrapper[4576]: I1203 08:54:51.410344 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dkt4d"] Dec 03 08:54:51 crc kubenswrapper[4576]: I1203 08:54:51.413741 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-dkt4d"] Dec 03 08:54:51 crc kubenswrapper[4576]: I1203 08:54:51.414067 4576 scope.go:117] "RemoveContainer" containerID="51736c690042c6429c301f07fa3a81175e5b11054bbe5ce52fca09fe595084b7" Dec 03 08:54:51 crc kubenswrapper[4576]: I1203 08:54:51.434758 4576 scope.go:117] "RemoveContainer" containerID="356edcdbf1b24a6b73197fdeb12595e03eab334d52d98045aa5a1486631a0863" Dec 03 08:54:51 crc kubenswrapper[4576]: E1203 08:54:51.435554 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"356edcdbf1b24a6b73197fdeb12595e03eab334d52d98045aa5a1486631a0863\": container with ID starting with 356edcdbf1b24a6b73197fdeb12595e03eab334d52d98045aa5a1486631a0863 not found: ID does not exist" containerID="356edcdbf1b24a6b73197fdeb12595e03eab334d52d98045aa5a1486631a0863" Dec 03 08:54:51 crc kubenswrapper[4576]: I1203 08:54:51.435741 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"356edcdbf1b24a6b73197fdeb12595e03eab334d52d98045aa5a1486631a0863"} err="failed to get container status \"356edcdbf1b24a6b73197fdeb12595e03eab334d52d98045aa5a1486631a0863\": rpc error: code = NotFound desc = could not find container \"356edcdbf1b24a6b73197fdeb12595e03eab334d52d98045aa5a1486631a0863\": container with ID starting with 356edcdbf1b24a6b73197fdeb12595e03eab334d52d98045aa5a1486631a0863 not found: ID does not exist" Dec 03 08:54:51 crc kubenswrapper[4576]: I1203 08:54:51.435984 4576 scope.go:117] "RemoveContainer" containerID="308f3fb236f6ecac6977d7cc55ccaec393a87714bd32f3f3a28f8eeb265e856c" Dec 03 08:54:51 crc kubenswrapper[4576]: E1203 08:54:51.436632 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"308f3fb236f6ecac6977d7cc55ccaec393a87714bd32f3f3a28f8eeb265e856c\": container with ID starting with 308f3fb236f6ecac6977d7cc55ccaec393a87714bd32f3f3a28f8eeb265e856c not found: ID does not exist" containerID="308f3fb236f6ecac6977d7cc55ccaec393a87714bd32f3f3a28f8eeb265e856c" Dec 03 08:54:51 crc kubenswrapper[4576]: I1203 08:54:51.436700 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"308f3fb236f6ecac6977d7cc55ccaec393a87714bd32f3f3a28f8eeb265e856c"} err="failed to get container status \"308f3fb236f6ecac6977d7cc55ccaec393a87714bd32f3f3a28f8eeb265e856c\": rpc error: code = NotFound desc = could not find container \"308f3fb236f6ecac6977d7cc55ccaec393a87714bd32f3f3a28f8eeb265e856c\": container with ID starting with 308f3fb236f6ecac6977d7cc55ccaec393a87714bd32f3f3a28f8eeb265e856c not found: ID does not exist" Dec 03 08:54:51 crc kubenswrapper[4576]: I1203 08:54:51.436723 4576 scope.go:117] "RemoveContainer" 
containerID="51736c690042c6429c301f07fa3a81175e5b11054bbe5ce52fca09fe595084b7" Dec 03 08:54:51 crc kubenswrapper[4576]: E1203 08:54:51.437117 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51736c690042c6429c301f07fa3a81175e5b11054bbe5ce52fca09fe595084b7\": container with ID starting with 51736c690042c6429c301f07fa3a81175e5b11054bbe5ce52fca09fe595084b7 not found: ID does not exist" containerID="51736c690042c6429c301f07fa3a81175e5b11054bbe5ce52fca09fe595084b7" Dec 03 08:54:51 crc kubenswrapper[4576]: I1203 08:54:51.437147 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51736c690042c6429c301f07fa3a81175e5b11054bbe5ce52fca09fe595084b7"} err="failed to get container status \"51736c690042c6429c301f07fa3a81175e5b11054bbe5ce52fca09fe595084b7\": rpc error: code = NotFound desc = could not find container \"51736c690042c6429c301f07fa3a81175e5b11054bbe5ce52fca09fe595084b7\": container with ID starting with 51736c690042c6429c301f07fa3a81175e5b11054bbe5ce52fca09fe595084b7 not found: ID does not exist" Dec 03 08:54:51 crc kubenswrapper[4576]: I1203 08:54:51.684856 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f65589af-80da-4218-9330-dc5b8f2574fd" path="/var/lib/kubelet/pods/f65589af-80da-4218-9330-dc5b8f2574fd/volumes" Dec 03 08:55:00 crc kubenswrapper[4576]: I1203 08:55:00.817228 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-l45pw"] Dec 03 08:55:00 crc kubenswrapper[4576]: E1203 08:55:00.818626 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f65589af-80da-4218-9330-dc5b8f2574fd" containerName="extract-content" Dec 03 08:55:00 crc kubenswrapper[4576]: I1203 08:55:00.818646 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="f65589af-80da-4218-9330-dc5b8f2574fd" containerName="extract-content" Dec 03 08:55:00 crc kubenswrapper[4576]: E1203 08:55:00.818666 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f65589af-80da-4218-9330-dc5b8f2574fd" containerName="registry-server" Dec 03 08:55:00 crc kubenswrapper[4576]: I1203 08:55:00.818675 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="f65589af-80da-4218-9330-dc5b8f2574fd" containerName="registry-server" Dec 03 08:55:00 crc kubenswrapper[4576]: E1203 08:55:00.818715 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f65589af-80da-4218-9330-dc5b8f2574fd" containerName="extract-utilities" Dec 03 08:55:00 crc kubenswrapper[4576]: I1203 08:55:00.818731 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="f65589af-80da-4218-9330-dc5b8f2574fd" containerName="extract-utilities" Dec 03 08:55:00 crc kubenswrapper[4576]: I1203 08:55:00.818865 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="f65589af-80da-4218-9330-dc5b8f2574fd" containerName="registry-server" Dec 03 08:55:00 crc kubenswrapper[4576]: I1203 08:55:00.823098 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-l45pw" Dec 03 08:55:00 crc kubenswrapper[4576]: I1203 08:55:00.827371 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Dec 03 08:55:00 crc kubenswrapper[4576]: I1203 08:55:00.828909 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Dec 03 08:55:00 crc kubenswrapper[4576]: I1203 08:55:00.831858 4576 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-bvlp5" Dec 03 08:55:00 crc kubenswrapper[4576]: I1203 08:55:00.838996 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-l45pw"] Dec 03 08:55:00 crc kubenswrapper[4576]: I1203 08:55:00.846832 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-ms2nw"] Dec 03 08:55:00 crc kubenswrapper[4576]: I1203 08:55:00.847462 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-ms2nw" Dec 03 08:55:00 crc kubenswrapper[4576]: I1203 08:55:00.849179 4576 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-85xq9" Dec 03 08:55:00 crc kubenswrapper[4576]: I1203 08:55:00.869794 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-ms2nw"] Dec 03 08:55:00 crc kubenswrapper[4576]: I1203 08:55:00.884721 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-fjnrl"] Dec 03 08:55:00 crc kubenswrapper[4576]: I1203 08:55:00.885337 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-fjnrl" Dec 03 08:55:00 crc kubenswrapper[4576]: I1203 08:55:00.888170 4576 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-lvb94" Dec 03 08:55:00 crc kubenswrapper[4576]: I1203 08:55:00.902888 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-fjnrl"] Dec 03 08:55:00 crc kubenswrapper[4576]: I1203 08:55:00.930504 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-95r94\" (UniqueName: \"kubernetes.io/projected/59825354-6654-4c6a-be27-4d3b6f2a57c2-kube-api-access-95r94\") pod \"cert-manager-cainjector-7f985d654d-l45pw\" (UID: \"59825354-6654-4c6a-be27-4d3b6f2a57c2\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-l45pw" Dec 03 08:55:01 crc kubenswrapper[4576]: I1203 08:55:01.032031 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6wwf\" (UniqueName: \"kubernetes.io/projected/a1cefe8c-df93-4ed2-a334-c60ce9cc918d-kube-api-access-c6wwf\") pod \"cert-manager-webhook-5655c58dd6-fjnrl\" (UID: \"a1cefe8c-df93-4ed2-a334-c60ce9cc918d\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-fjnrl" Dec 03 08:55:01 crc kubenswrapper[4576]: I1203 08:55:01.032096 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-95r94\" (UniqueName: \"kubernetes.io/projected/59825354-6654-4c6a-be27-4d3b6f2a57c2-kube-api-access-95r94\") pod \"cert-manager-cainjector-7f985d654d-l45pw\" (UID: \"59825354-6654-4c6a-be27-4d3b6f2a57c2\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-l45pw" Dec 03 
08:55:01 crc kubenswrapper[4576]: I1203 08:55:01.032169 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbrbr\" (UniqueName: \"kubernetes.io/projected/5d5e47f6-494f-4fc7-a4c0-c12410e86da3-kube-api-access-rbrbr\") pod \"cert-manager-5b446d88c5-ms2nw\" (UID: \"5d5e47f6-494f-4fc7-a4c0-c12410e86da3\") " pod="cert-manager/cert-manager-5b446d88c5-ms2nw" Dec 03 08:55:01 crc kubenswrapper[4576]: I1203 08:55:01.056896 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-95r94\" (UniqueName: \"kubernetes.io/projected/59825354-6654-4c6a-be27-4d3b6f2a57c2-kube-api-access-95r94\") pod \"cert-manager-cainjector-7f985d654d-l45pw\" (UID: \"59825354-6654-4c6a-be27-4d3b6f2a57c2\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-l45pw" Dec 03 08:55:01 crc kubenswrapper[4576]: I1203 08:55:01.133844 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6wwf\" (UniqueName: \"kubernetes.io/projected/a1cefe8c-df93-4ed2-a334-c60ce9cc918d-kube-api-access-c6wwf\") pod \"cert-manager-webhook-5655c58dd6-fjnrl\" (UID: \"a1cefe8c-df93-4ed2-a334-c60ce9cc918d\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-fjnrl" Dec 03 08:55:01 crc kubenswrapper[4576]: I1203 08:55:01.133936 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbrbr\" (UniqueName: \"kubernetes.io/projected/5d5e47f6-494f-4fc7-a4c0-c12410e86da3-kube-api-access-rbrbr\") pod \"cert-manager-5b446d88c5-ms2nw\" (UID: \"5d5e47f6-494f-4fc7-a4c0-c12410e86da3\") " pod="cert-manager/cert-manager-5b446d88c5-ms2nw" Dec 03 08:55:01 crc kubenswrapper[4576]: I1203 08:55:01.144415 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-l45pw" Dec 03 08:55:01 crc kubenswrapper[4576]: I1203 08:55:01.152800 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rbrbr\" (UniqueName: \"kubernetes.io/projected/5d5e47f6-494f-4fc7-a4c0-c12410e86da3-kube-api-access-rbrbr\") pod \"cert-manager-5b446d88c5-ms2nw\" (UID: \"5d5e47f6-494f-4fc7-a4c0-c12410e86da3\") " pod="cert-manager/cert-manager-5b446d88c5-ms2nw" Dec 03 08:55:01 crc kubenswrapper[4576]: I1203 08:55:01.155416 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6wwf\" (UniqueName: \"kubernetes.io/projected/a1cefe8c-df93-4ed2-a334-c60ce9cc918d-kube-api-access-c6wwf\") pod \"cert-manager-webhook-5655c58dd6-fjnrl\" (UID: \"a1cefe8c-df93-4ed2-a334-c60ce9cc918d\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-fjnrl" Dec 03 08:55:01 crc kubenswrapper[4576]: I1203 08:55:01.160182 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-ms2nw" Dec 03 08:55:01 crc kubenswrapper[4576]: I1203 08:55:01.200743 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-fjnrl" Dec 03 08:55:01 crc kubenswrapper[4576]: I1203 08:55:01.434007 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-l45pw"] Dec 03 08:55:01 crc kubenswrapper[4576]: I1203 08:55:01.473130 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-fjnrl"] Dec 03 08:55:01 crc kubenswrapper[4576]: W1203 08:55:01.485167 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda1cefe8c_df93_4ed2_a334_c60ce9cc918d.slice/crio-7c31de5108eedb092b0e963c8effd83de8e98dd5f8e9ce82d52a30e1af4de4dd WatchSource:0}: Error finding container 7c31de5108eedb092b0e963c8effd83de8e98dd5f8e9ce82d52a30e1af4de4dd: Status 404 returned error can't find the container with id 7c31de5108eedb092b0e963c8effd83de8e98dd5f8e9ce82d52a30e1af4de4dd Dec 03 08:55:01 crc kubenswrapper[4576]: I1203 08:55:01.499284 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-ms2nw"] Dec 03 08:55:01 crc kubenswrapper[4576]: W1203 08:55:01.499439 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5d5e47f6_494f_4fc7_a4c0_c12410e86da3.slice/crio-6373e80502ca48df7d2c6bc1e843f7b5d54b929596bcf6e3fa6328e1b0a5b3b4 WatchSource:0}: Error finding container 6373e80502ca48df7d2c6bc1e843f7b5d54b929596bcf6e3fa6328e1b0a5b3b4: Status 404 returned error can't find the container with id 6373e80502ca48df7d2c6bc1e843f7b5d54b929596bcf6e3fa6328e1b0a5b3b4 Dec 03 08:55:02 crc kubenswrapper[4576]: I1203 08:55:02.433738 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-l45pw" event={"ID":"59825354-6654-4c6a-be27-4d3b6f2a57c2","Type":"ContainerStarted","Data":"f3fb66a9f12eb75d4647eae335d9d878bbaad2121d88ac2e3773fbc0cce392d1"} Dec 03 08:55:02 crc kubenswrapper[4576]: I1203 08:55:02.434728 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-ms2nw" event={"ID":"5d5e47f6-494f-4fc7-a4c0-c12410e86da3","Type":"ContainerStarted","Data":"6373e80502ca48df7d2c6bc1e843f7b5d54b929596bcf6e3fa6328e1b0a5b3b4"} Dec 03 08:55:02 crc kubenswrapper[4576]: I1203 08:55:02.435980 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-fjnrl" event={"ID":"a1cefe8c-df93-4ed2-a334-c60ce9cc918d","Type":"ContainerStarted","Data":"7c31de5108eedb092b0e963c8effd83de8e98dd5f8e9ce82d52a30e1af4de4dd"} Dec 03 08:55:04 crc kubenswrapper[4576]: I1203 08:55:04.990601 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-sv2s4"] Dec 03 08:55:04 crc kubenswrapper[4576]: I1203 08:55:04.992421 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sv2s4" Dec 03 08:55:05 crc kubenswrapper[4576]: I1203 08:55:05.008984 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5-catalog-content\") pod \"redhat-marketplace-sv2s4\" (UID: \"0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5\") " pod="openshift-marketplace/redhat-marketplace-sv2s4" Dec 03 08:55:05 crc kubenswrapper[4576]: I1203 08:55:05.009071 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5-utilities\") pod \"redhat-marketplace-sv2s4\" (UID: \"0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5\") " pod="openshift-marketplace/redhat-marketplace-sv2s4" Dec 03 08:55:05 crc kubenswrapper[4576]: I1203 08:55:05.009104 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ks9q\" (UniqueName: \"kubernetes.io/projected/0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5-kube-api-access-9ks9q\") pod \"redhat-marketplace-sv2s4\" (UID: \"0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5\") " pod="openshift-marketplace/redhat-marketplace-sv2s4" Dec 03 08:55:05 crc kubenswrapper[4576]: I1203 08:55:05.015267 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-sv2s4"] Dec 03 08:55:05 crc kubenswrapper[4576]: I1203 08:55:05.110313 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ks9q\" (UniqueName: \"kubernetes.io/projected/0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5-kube-api-access-9ks9q\") pod \"redhat-marketplace-sv2s4\" (UID: \"0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5\") " pod="openshift-marketplace/redhat-marketplace-sv2s4" Dec 03 08:55:05 crc kubenswrapper[4576]: I1203 08:55:05.110393 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5-catalog-content\") pod \"redhat-marketplace-sv2s4\" (UID: \"0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5\") " pod="openshift-marketplace/redhat-marketplace-sv2s4" Dec 03 08:55:05 crc kubenswrapper[4576]: I1203 08:55:05.110437 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5-utilities\") pod \"redhat-marketplace-sv2s4\" (UID: \"0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5\") " pod="openshift-marketplace/redhat-marketplace-sv2s4" Dec 03 08:55:05 crc kubenswrapper[4576]: I1203 08:55:05.110880 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5-utilities\") pod \"redhat-marketplace-sv2s4\" (UID: \"0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5\") " pod="openshift-marketplace/redhat-marketplace-sv2s4" Dec 03 08:55:05 crc kubenswrapper[4576]: I1203 08:55:05.111087 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5-catalog-content\") pod \"redhat-marketplace-sv2s4\" (UID: \"0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5\") " pod="openshift-marketplace/redhat-marketplace-sv2s4" Dec 03 08:55:05 crc kubenswrapper[4576]: I1203 08:55:05.147015 4576 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-9ks9q\" (UniqueName: \"kubernetes.io/projected/0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5-kube-api-access-9ks9q\") pod \"redhat-marketplace-sv2s4\" (UID: \"0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5\") " pod="openshift-marketplace/redhat-marketplace-sv2s4" Dec 03 08:55:05 crc kubenswrapper[4576]: I1203 08:55:05.315888 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sv2s4" Dec 03 08:55:05 crc kubenswrapper[4576]: I1203 08:55:05.453890 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-l45pw" event={"ID":"59825354-6654-4c6a-be27-4d3b6f2a57c2","Type":"ContainerStarted","Data":"5171a98536e7c2a817fdfaffaa4706aa6186186d35e56b4ba0bad390cd466e48"} Dec 03 08:55:05 crc kubenswrapper[4576]: I1203 08:55:05.461036 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-ms2nw" event={"ID":"5d5e47f6-494f-4fc7-a4c0-c12410e86da3","Type":"ContainerStarted","Data":"2e08d3bb049ad47bc333adbde17fce94c9b541a52b1a5c953c0405061a31ca5c"} Dec 03 08:55:05 crc kubenswrapper[4576]: I1203 08:55:05.466921 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-fjnrl" event={"ID":"a1cefe8c-df93-4ed2-a334-c60ce9cc918d","Type":"ContainerStarted","Data":"ea32ae1d0c065dd9888cb6e07bdbdacb57a5449440d7ae47b5dc941ea34526f3"} Dec 03 08:55:05 crc kubenswrapper[4576]: I1203 08:55:05.467415 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-fjnrl" Dec 03 08:55:05 crc kubenswrapper[4576]: I1203 08:55:05.476463 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-l45pw" podStartSLOduration=2.152983001 podStartE2EDuration="5.476419461s" podCreationTimestamp="2025-12-03 08:55:00 +0000 UTC" firstStartedPulling="2025-12-03 08:55:01.46121609 +0000 UTC m=+908.847193074" lastFinishedPulling="2025-12-03 08:55:04.78465254 +0000 UTC m=+912.170629534" observedRunningTime="2025-12-03 08:55:05.47492576 +0000 UTC m=+912.860902744" watchObservedRunningTime="2025-12-03 08:55:05.476419461 +0000 UTC m=+912.862396445" Dec 03 08:55:05 crc kubenswrapper[4576]: I1203 08:55:05.501217 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-ms2nw" podStartSLOduration=2.195534035 podStartE2EDuration="5.501199014s" podCreationTimestamp="2025-12-03 08:55:00 +0000 UTC" firstStartedPulling="2025-12-03 08:55:01.509646966 +0000 UTC m=+908.895623950" lastFinishedPulling="2025-12-03 08:55:04.815311925 +0000 UTC m=+912.201288929" observedRunningTime="2025-12-03 08:55:05.496876644 +0000 UTC m=+912.882853628" watchObservedRunningTime="2025-12-03 08:55:05.501199014 +0000 UTC m=+912.887175998" Dec 03 08:55:05 crc kubenswrapper[4576]: I1203 08:55:05.522341 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-fjnrl" podStartSLOduration=2.245171783 podStartE2EDuration="5.522324296s" podCreationTimestamp="2025-12-03 08:55:00 +0000 UTC" firstStartedPulling="2025-12-03 08:55:01.500139533 +0000 UTC m=+908.886116517" lastFinishedPulling="2025-12-03 08:55:04.777292046 +0000 UTC m=+912.163269030" observedRunningTime="2025-12-03 08:55:05.520941498 +0000 UTC m=+912.906918482" watchObservedRunningTime="2025-12-03 08:55:05.522324296 +0000 UTC m=+912.908301280" Dec 03 08:55:05 crc 
kubenswrapper[4576]: I1203 08:55:05.556828 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-sv2s4"] Dec 03 08:55:06 crc kubenswrapper[4576]: I1203 08:55:06.476952 4576 generic.go:334] "Generic (PLEG): container finished" podID="0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5" containerID="882a90b2ad3168adc10d56551ebf35f5f1a34cc283e6daa76a60c14df8d61be5" exitCode=0 Dec 03 08:55:06 crc kubenswrapper[4576]: I1203 08:55:06.477037 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sv2s4" event={"ID":"0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5","Type":"ContainerDied","Data":"882a90b2ad3168adc10d56551ebf35f5f1a34cc283e6daa76a60c14df8d61be5"} Dec 03 08:55:06 crc kubenswrapper[4576]: I1203 08:55:06.477371 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sv2s4" event={"ID":"0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5","Type":"ContainerStarted","Data":"7bb5995e29acaf5ca66dd353cb0dbde4dc89a14a0f0939511d4b7c9d4ed3e9e0"} Dec 03 08:55:08 crc kubenswrapper[4576]: I1203 08:55:08.490037 4576 generic.go:334] "Generic (PLEG): container finished" podID="0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5" containerID="6a2f1c3c03b423ace83b4d6ce4207f4e565f4f06e4df27b39b13e5a630e94f59" exitCode=0 Dec 03 08:55:08 crc kubenswrapper[4576]: I1203 08:55:08.490113 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sv2s4" event={"ID":"0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5","Type":"ContainerDied","Data":"6a2f1c3c03b423ace83b4d6ce4207f4e565f4f06e4df27b39b13e5a630e94f59"} Dec 03 08:55:09 crc kubenswrapper[4576]: I1203 08:55:09.499481 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sv2s4" event={"ID":"0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5","Type":"ContainerStarted","Data":"1943df8335351f2d58676e84f379026e26664be84c1d3793079da8982ae1c919"} Dec 03 08:55:11 crc kubenswrapper[4576]: I1203 08:55:11.203779 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-fjnrl" Dec 03 08:55:11 crc kubenswrapper[4576]: I1203 08:55:11.230277 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-sv2s4" podStartSLOduration=4.625944093 podStartE2EDuration="7.230253977s" podCreationTimestamp="2025-12-03 08:55:04 +0000 UTC" firstStartedPulling="2025-12-03 08:55:06.483104428 +0000 UTC m=+913.869081452" lastFinishedPulling="2025-12-03 08:55:09.087414342 +0000 UTC m=+916.473391336" observedRunningTime="2025-12-03 08:55:09.524837197 +0000 UTC m=+916.910814201" watchObservedRunningTime="2025-12-03 08:55:11.230253977 +0000 UTC m=+918.616230961" Dec 03 08:55:11 crc kubenswrapper[4576]: I1203 08:55:11.377800 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-sm8hx"] Dec 03 08:55:11 crc kubenswrapper[4576]: I1203 08:55:11.379385 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-sm8hx" Dec 03 08:55:11 crc kubenswrapper[4576]: I1203 08:55:11.396845 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-sm8hx"] Dec 03 08:55:11 crc kubenswrapper[4576]: I1203 08:55:11.500841 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2850519-8de4-48ce-a573-e56c2b01cc98-catalog-content\") pod \"community-operators-sm8hx\" (UID: \"a2850519-8de4-48ce-a573-e56c2b01cc98\") " pod="openshift-marketplace/community-operators-sm8hx" Dec 03 08:55:11 crc kubenswrapper[4576]: I1203 08:55:11.500921 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cm4mx\" (UniqueName: \"kubernetes.io/projected/a2850519-8de4-48ce-a573-e56c2b01cc98-kube-api-access-cm4mx\") pod \"community-operators-sm8hx\" (UID: \"a2850519-8de4-48ce-a573-e56c2b01cc98\") " pod="openshift-marketplace/community-operators-sm8hx" Dec 03 08:55:11 crc kubenswrapper[4576]: I1203 08:55:11.500988 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2850519-8de4-48ce-a573-e56c2b01cc98-utilities\") pod \"community-operators-sm8hx\" (UID: \"a2850519-8de4-48ce-a573-e56c2b01cc98\") " pod="openshift-marketplace/community-operators-sm8hx" Dec 03 08:55:11 crc kubenswrapper[4576]: I1203 08:55:11.602592 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2850519-8de4-48ce-a573-e56c2b01cc98-utilities\") pod \"community-operators-sm8hx\" (UID: \"a2850519-8de4-48ce-a573-e56c2b01cc98\") " pod="openshift-marketplace/community-operators-sm8hx" Dec 03 08:55:11 crc kubenswrapper[4576]: I1203 08:55:11.602679 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2850519-8de4-48ce-a573-e56c2b01cc98-catalog-content\") pod \"community-operators-sm8hx\" (UID: \"a2850519-8de4-48ce-a573-e56c2b01cc98\") " pod="openshift-marketplace/community-operators-sm8hx" Dec 03 08:55:11 crc kubenswrapper[4576]: I1203 08:55:11.602723 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cm4mx\" (UniqueName: \"kubernetes.io/projected/a2850519-8de4-48ce-a573-e56c2b01cc98-kube-api-access-cm4mx\") pod \"community-operators-sm8hx\" (UID: \"a2850519-8de4-48ce-a573-e56c2b01cc98\") " pod="openshift-marketplace/community-operators-sm8hx" Dec 03 08:55:11 crc kubenswrapper[4576]: I1203 08:55:11.603193 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2850519-8de4-48ce-a573-e56c2b01cc98-catalog-content\") pod \"community-operators-sm8hx\" (UID: \"a2850519-8de4-48ce-a573-e56c2b01cc98\") " pod="openshift-marketplace/community-operators-sm8hx" Dec 03 08:55:11 crc kubenswrapper[4576]: I1203 08:55:11.603307 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2850519-8de4-48ce-a573-e56c2b01cc98-utilities\") pod \"community-operators-sm8hx\" (UID: \"a2850519-8de4-48ce-a573-e56c2b01cc98\") " pod="openshift-marketplace/community-operators-sm8hx" Dec 03 08:55:11 crc kubenswrapper[4576]: I1203 08:55:11.650003 4576 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-cm4mx\" (UniqueName: \"kubernetes.io/projected/a2850519-8de4-48ce-a573-e56c2b01cc98-kube-api-access-cm4mx\") pod \"community-operators-sm8hx\" (UID: \"a2850519-8de4-48ce-a573-e56c2b01cc98\") " pod="openshift-marketplace/community-operators-sm8hx" Dec 03 08:55:11 crc kubenswrapper[4576]: I1203 08:55:11.713290 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-sm8hx" Dec 03 08:55:12 crc kubenswrapper[4576]: I1203 08:55:12.007701 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-sm8hx"] Dec 03 08:55:12 crc kubenswrapper[4576]: I1203 08:55:12.518044 4576 generic.go:334] "Generic (PLEG): container finished" podID="a2850519-8de4-48ce-a573-e56c2b01cc98" containerID="d3f64e2b6280dbed186d8483a91ac884d45e27f0cc9bc0300f344a4d6edcbfb8" exitCode=0 Dec 03 08:55:12 crc kubenswrapper[4576]: I1203 08:55:12.518083 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sm8hx" event={"ID":"a2850519-8de4-48ce-a573-e56c2b01cc98","Type":"ContainerDied","Data":"d3f64e2b6280dbed186d8483a91ac884d45e27f0cc9bc0300f344a4d6edcbfb8"} Dec 03 08:55:12 crc kubenswrapper[4576]: I1203 08:55:12.518108 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sm8hx" event={"ID":"a2850519-8de4-48ce-a573-e56c2b01cc98","Type":"ContainerStarted","Data":"cd1ed5df1dedc4b57089612d0c2c5b2fa6f7446aeddd7a08b8f749a40f40134f"} Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.233377 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-5f9zh"] Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.234284 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="ovn-controller" containerID="cri-o://dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f" gracePeriod=30 Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.234442 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4" gracePeriod=30 Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.234394 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="ovn-acl-logging" containerID="cri-o://eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704" gracePeriod=30 Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.234499 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="northd" containerID="cri-o://62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c" gracePeriod=30 Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.234600 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="nbdb" 
containerID="cri-o://eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8" gracePeriod=30 Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.234635 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="sbdb" containerID="cri-o://5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0" gracePeriod=30 Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.234492 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="kube-rbac-proxy-node" containerID="cri-o://6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97" gracePeriod=30 Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.300178 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="ovnkube-controller" containerID="cri-o://58837871b925370f84d8c9465faa146eb92d65c964f3819d2f27135917439036" gracePeriod=30 Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.530780 4576 generic.go:334] "Generic (PLEG): container finished" podID="a2850519-8de4-48ce-a573-e56c2b01cc98" containerID="4454e15692e71a7e4d22c182e1c84ecc5f3ffec4a27a04502b481a99bb2b9d7c" exitCode=0 Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.531058 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sm8hx" event={"ID":"a2850519-8de4-48ce-a573-e56c2b01cc98","Type":"ContainerDied","Data":"4454e15692e71a7e4d22c182e1c84ecc5f3ffec4a27a04502b481a99bb2b9d7c"} Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.534964 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5f9zh_cfe34f07-1425-4b62-9eb0-70d1b197611c/ovnkube-controller/3.log" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.536044 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5f9zh_cfe34f07-1425-4b62-9eb0-70d1b197611c/ovn-acl-logging/1.log" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.541629 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5f9zh_cfe34f07-1425-4b62-9eb0-70d1b197611c/ovn-acl-logging/0.log" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.542170 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5f9zh_cfe34f07-1425-4b62-9eb0-70d1b197611c/ovn-controller/0.log" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.542623 4576 generic.go:334] "Generic (PLEG): container finished" podID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerID="58837871b925370f84d8c9465faa146eb92d65c964f3819d2f27135917439036" exitCode=0 Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.542660 4576 generic.go:334] "Generic (PLEG): container finished" podID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerID="eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704" exitCode=143 Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.542672 4576 generic.go:334] "Generic (PLEG): container finished" podID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerID="5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4" exitCode=0 Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.542681 4576 
generic.go:334] "Generic (PLEG): container finished" podID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerID="6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97" exitCode=0 Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.542690 4576 generic.go:334] "Generic (PLEG): container finished" podID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerID="dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f" exitCode=143 Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.542762 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" event={"ID":"cfe34f07-1425-4b62-9eb0-70d1b197611c","Type":"ContainerDied","Data":"58837871b925370f84d8c9465faa146eb92d65c964f3819d2f27135917439036"} Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.542798 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" event={"ID":"cfe34f07-1425-4b62-9eb0-70d1b197611c","Type":"ContainerDied","Data":"eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704"} Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.542812 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" event={"ID":"cfe34f07-1425-4b62-9eb0-70d1b197611c","Type":"ContainerDied","Data":"5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4"} Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.542823 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" event={"ID":"cfe34f07-1425-4b62-9eb0-70d1b197611c","Type":"ContainerDied","Data":"6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97"} Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.542866 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" event={"ID":"cfe34f07-1425-4b62-9eb0-70d1b197611c","Type":"ContainerDied","Data":"dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f"} Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.542888 4576 scope.go:117] "RemoveContainer" containerID="3e93a2db7560906496e9579df87c1f307dbd99c51fa36b08d85dc762c02b531c" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.547136 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jbxx2_e2b7eac2-6611-49d0-9da5-f97a3ccc9529/kube-multus/2.log" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.547601 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jbxx2_e2b7eac2-6611-49d0-9da5-f97a3ccc9529/kube-multus/1.log" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.547640 4576 generic.go:334] "Generic (PLEG): container finished" podID="e2b7eac2-6611-49d0-9da5-f97a3ccc9529" containerID="93a0f3796d3ca6c9b16721e20d639fadcb243de13badf957cca591620294c930" exitCode=2 Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.547673 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jbxx2" event={"ID":"e2b7eac2-6611-49d0-9da5-f97a3ccc9529","Type":"ContainerDied","Data":"93a0f3796d3ca6c9b16721e20d639fadcb243de13badf957cca591620294c930"} Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.548112 4576 scope.go:117] "RemoveContainer" containerID="93a0f3796d3ca6c9b16721e20d639fadcb243de13badf957cca591620294c930" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.575181 4576 scope.go:117] "RemoveContainer" containerID="106766323b05884834bffc21cd45b77320e1bf62862139be3e9cfed8b004275e" 
Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.739356 4576 scope.go:117] "RemoveContainer" containerID="43112fdd73d3944e79a634803caefc0a68a10da3cdf3713c117ade99e3d6f1c9" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.769583 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5f9zh_cfe34f07-1425-4b62-9eb0-70d1b197611c/ovn-acl-logging/1.log" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.771506 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5f9zh_cfe34f07-1425-4b62-9eb0-70d1b197611c/ovn-controller/0.log" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.772007 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.836836 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-z5gbz"] Dec 03 08:55:14 crc kubenswrapper[4576]: E1203 08:55:14.837412 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="kube-rbac-proxy-ovn-metrics" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.837435 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="kube-rbac-proxy-ovn-metrics" Dec 03 08:55:14 crc kubenswrapper[4576]: E1203 08:55:14.837445 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="ovnkube-controller" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.837454 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="ovnkube-controller" Dec 03 08:55:14 crc kubenswrapper[4576]: E1203 08:55:14.837466 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="kube-rbac-proxy-node" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.837475 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="kube-rbac-proxy-node" Dec 03 08:55:14 crc kubenswrapper[4576]: E1203 08:55:14.837490 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="northd" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.837498 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="northd" Dec 03 08:55:14 crc kubenswrapper[4576]: E1203 08:55:14.837511 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="ovnkube-controller" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.837519 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="ovnkube-controller" Dec 03 08:55:14 crc kubenswrapper[4576]: E1203 08:55:14.837589 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="ovnkube-controller" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.837597 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="ovnkube-controller" Dec 03 08:55:14 crc kubenswrapper[4576]: E1203 08:55:14.837609 4576 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="nbdb" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.837617 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="nbdb" Dec 03 08:55:14 crc kubenswrapper[4576]: E1203 08:55:14.837625 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="ovnkube-controller" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.837633 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="ovnkube-controller" Dec 03 08:55:14 crc kubenswrapper[4576]: E1203 08:55:14.837643 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="ovnkube-controller" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.837650 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="ovnkube-controller" Dec 03 08:55:14 crc kubenswrapper[4576]: E1203 08:55:14.837662 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="ovn-acl-logging" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.837669 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="ovn-acl-logging" Dec 03 08:55:14 crc kubenswrapper[4576]: E1203 08:55:14.837677 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="ovn-acl-logging" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.837686 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="ovn-acl-logging" Dec 03 08:55:14 crc kubenswrapper[4576]: E1203 08:55:14.837700 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="kubecfg-setup" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.837707 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="kubecfg-setup" Dec 03 08:55:14 crc kubenswrapper[4576]: E1203 08:55:14.837717 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="sbdb" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.837725 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="sbdb" Dec 03 08:55:14 crc kubenswrapper[4576]: E1203 08:55:14.837739 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="ovn-controller" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.837747 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="ovn-controller" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.837866 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="nbdb" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.837886 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="ovnkube-controller" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.837894 4576 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="sbdb" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.837906 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="ovn-acl-logging" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.837915 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="kube-rbac-proxy-node" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.837925 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="kube-rbac-proxy-ovn-metrics" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.837933 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="ovnkube-controller" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.837942 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="ovn-acl-logging" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.837953 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="ovnkube-controller" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.837961 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="northd" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.837969 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="ovnkube-controller" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.837977 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="ovn-controller" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.838200 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerName="ovnkube-controller" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.842755 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.944346 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/cfe34f07-1425-4b62-9eb0-70d1b197611c-ovnkube-script-lib\") pod \"cfe34f07-1425-4b62-9eb0-70d1b197611c\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.944388 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-host-slash\") pod \"cfe34f07-1425-4b62-9eb0-70d1b197611c\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.944425 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-host-kubelet\") pod \"cfe34f07-1425-4b62-9eb0-70d1b197611c\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.944440 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-run-openvswitch\") pod \"cfe34f07-1425-4b62-9eb0-70d1b197611c\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.944473 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-host-cni-netd\") pod \"cfe34f07-1425-4b62-9eb0-70d1b197611c\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.944491 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/cfe34f07-1425-4b62-9eb0-70d1b197611c-ovnkube-config\") pod \"cfe34f07-1425-4b62-9eb0-70d1b197611c\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.944506 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-run-ovn\") pod \"cfe34f07-1425-4b62-9eb0-70d1b197611c\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.944545 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/cfe34f07-1425-4b62-9eb0-70d1b197611c-ovn-node-metrics-cert\") pod \"cfe34f07-1425-4b62-9eb0-70d1b197611c\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.944560 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/cfe34f07-1425-4b62-9eb0-70d1b197611c-env-overrides\") pod \"cfe34f07-1425-4b62-9eb0-70d1b197611c\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.944578 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jqbqk\" (UniqueName: \"kubernetes.io/projected/cfe34f07-1425-4b62-9eb0-70d1b197611c-kube-api-access-jqbqk\") pod 
\"cfe34f07-1425-4b62-9eb0-70d1b197611c\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.944593 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-log-socket\") pod \"cfe34f07-1425-4b62-9eb0-70d1b197611c\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.944608 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-etc-openvswitch\") pod \"cfe34f07-1425-4b62-9eb0-70d1b197611c\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.944625 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-node-log\") pod \"cfe34f07-1425-4b62-9eb0-70d1b197611c\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.944641 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-host-run-netns\") pod \"cfe34f07-1425-4b62-9eb0-70d1b197611c\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.944663 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-var-lib-openvswitch\") pod \"cfe34f07-1425-4b62-9eb0-70d1b197611c\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.944684 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-systemd-units\") pod \"cfe34f07-1425-4b62-9eb0-70d1b197611c\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.944700 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-run-systemd\") pod \"cfe34f07-1425-4b62-9eb0-70d1b197611c\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.944726 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"cfe34f07-1425-4b62-9eb0-70d1b197611c\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.944756 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-host-run-ovn-kubernetes\") pod \"cfe34f07-1425-4b62-9eb0-70d1b197611c\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.944772 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: 
\"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-host-cni-bin\") pod \"cfe34f07-1425-4b62-9eb0-70d1b197611c\" (UID: \"cfe34f07-1425-4b62-9eb0-70d1b197611c\") " Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.944893 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-host-cni-netd\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.944920 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-host-cni-bin\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.944938 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-ovnkube-config\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.944959 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-host-kubelet\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.944978 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p6v2j\" (UniqueName: \"kubernetes.io/projected/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-kube-api-access-p6v2j\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.944997 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-run-systemd\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.945017 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-host-run-netns\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.945034 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-node-log\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.945062 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-env-overrides\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.945087 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-ovnkube-script-lib\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.945111 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.945127 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-ovn-node-metrics-cert\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.945145 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-log-socket\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.945161 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-host-run-ovn-kubernetes\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.945175 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-run-openvswitch\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.945197 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-host-slash\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.945221 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-run-ovn\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:14 crc 
kubenswrapper[4576]: I1203 08:55:14.945237 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-systemd-units\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.945251 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-var-lib-openvswitch\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.945266 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-etc-openvswitch\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.945662 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cfe34f07-1425-4b62-9eb0-70d1b197611c-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "cfe34f07-1425-4b62-9eb0-70d1b197611c" (UID: "cfe34f07-1425-4b62-9eb0-70d1b197611c"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.945690 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-host-slash" (OuterVolumeSpecName: "host-slash") pod "cfe34f07-1425-4b62-9eb0-70d1b197611c" (UID: "cfe34f07-1425-4b62-9eb0-70d1b197611c"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.945707 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "cfe34f07-1425-4b62-9eb0-70d1b197611c" (UID: "cfe34f07-1425-4b62-9eb0-70d1b197611c"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.945722 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "cfe34f07-1425-4b62-9eb0-70d1b197611c" (UID: "cfe34f07-1425-4b62-9eb0-70d1b197611c"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.945738 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "cfe34f07-1425-4b62-9eb0-70d1b197611c" (UID: "cfe34f07-1425-4b62-9eb0-70d1b197611c"). InnerVolumeSpecName "host-cni-netd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.945945 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cfe34f07-1425-4b62-9eb0-70d1b197611c-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "cfe34f07-1425-4b62-9eb0-70d1b197611c" (UID: "cfe34f07-1425-4b62-9eb0-70d1b197611c"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.945966 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "cfe34f07-1425-4b62-9eb0-70d1b197611c" (UID: "cfe34f07-1425-4b62-9eb0-70d1b197611c"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.946759 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "cfe34f07-1425-4b62-9eb0-70d1b197611c" (UID: "cfe34f07-1425-4b62-9eb0-70d1b197611c"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.947103 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "cfe34f07-1425-4b62-9eb0-70d1b197611c" (UID: "cfe34f07-1425-4b62-9eb0-70d1b197611c"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.947108 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cfe34f07-1425-4b62-9eb0-70d1b197611c-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "cfe34f07-1425-4b62-9eb0-70d1b197611c" (UID: "cfe34f07-1425-4b62-9eb0-70d1b197611c"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.947135 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-node-log" (OuterVolumeSpecName: "node-log") pod "cfe34f07-1425-4b62-9eb0-70d1b197611c" (UID: "cfe34f07-1425-4b62-9eb0-70d1b197611c"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.947175 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-log-socket" (OuterVolumeSpecName: "log-socket") pod "cfe34f07-1425-4b62-9eb0-70d1b197611c" (UID: "cfe34f07-1425-4b62-9eb0-70d1b197611c"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.947192 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "cfe34f07-1425-4b62-9eb0-70d1b197611c" (UID: "cfe34f07-1425-4b62-9eb0-70d1b197611c"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.947199 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "cfe34f07-1425-4b62-9eb0-70d1b197611c" (UID: "cfe34f07-1425-4b62-9eb0-70d1b197611c"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.947228 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "cfe34f07-1425-4b62-9eb0-70d1b197611c" (UID: "cfe34f07-1425-4b62-9eb0-70d1b197611c"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.947236 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "cfe34f07-1425-4b62-9eb0-70d1b197611c" (UID: "cfe34f07-1425-4b62-9eb0-70d1b197611c"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.947262 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "cfe34f07-1425-4b62-9eb0-70d1b197611c" (UID: "cfe34f07-1425-4b62-9eb0-70d1b197611c"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.951299 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cfe34f07-1425-4b62-9eb0-70d1b197611c-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "cfe34f07-1425-4b62-9eb0-70d1b197611c" (UID: "cfe34f07-1425-4b62-9eb0-70d1b197611c"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.951392 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cfe34f07-1425-4b62-9eb0-70d1b197611c-kube-api-access-jqbqk" (OuterVolumeSpecName: "kube-api-access-jqbqk") pod "cfe34f07-1425-4b62-9eb0-70d1b197611c" (UID: "cfe34f07-1425-4b62-9eb0-70d1b197611c"). InnerVolumeSpecName "kube-api-access-jqbqk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:55:14 crc kubenswrapper[4576]: I1203 08:55:14.972919 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "cfe34f07-1425-4b62-9eb0-70d1b197611c" (UID: "cfe34f07-1425-4b62-9eb0-70d1b197611c"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.046088 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.046140 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-ovn-node-metrics-cert\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.046165 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-log-socket\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.046180 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-host-run-ovn-kubernetes\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.046204 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-run-openvswitch\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.046228 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-host-slash\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.046248 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-run-ovn\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.046261 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-systemd-units\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.046275 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-var-lib-openvswitch\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.046289 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-etc-openvswitch\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.046310 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-host-cni-netd\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.046317 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.046367 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-host-cni-bin\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.046333 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-host-cni-bin\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.046437 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-ovnkube-config\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.046493 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-host-kubelet\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.046577 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p6v2j\" (UniqueName: \"kubernetes.io/projected/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-kube-api-access-p6v2j\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.046620 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-run-systemd\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:15 
crc kubenswrapper[4576]: I1203 08:55:15.046643 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-host-run-netns\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.046676 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-node-log\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.046705 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-env-overrides\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.046784 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-ovnkube-script-lib\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.046863 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-run-systemd\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.047513 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-host-slash\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.047590 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-node-log\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.047668 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-host-run-netns\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.048307 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-ovnkube-config\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.048354 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: 
\"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-host-kubelet\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.048378 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-run-ovn\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.048398 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-log-socket\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.048789 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-env-overrides\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.048847 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-host-cni-netd\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.048807 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-etc-openvswitch\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.048828 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-systemd-units\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.048878 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-host-run-ovn-kubernetes\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.048894 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-run-openvswitch\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.049033 4576 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.049054 4576 
reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.049066 4576 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.049077 4576 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/cfe34f07-1425-4b62-9eb0-70d1b197611c-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.049088 4576 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.049098 4576 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/cfe34f07-1425-4b62-9eb0-70d1b197611c-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.049111 4576 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/cfe34f07-1425-4b62-9eb0-70d1b197611c-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.049122 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jqbqk\" (UniqueName: \"kubernetes.io/projected/cfe34f07-1425-4b62-9eb0-70d1b197611c-kube-api-access-jqbqk\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.049116 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-var-lib-openvswitch\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.049235 4576 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-log-socket\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.049276 4576 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.049286 4576 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-node-log\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.049295 4576 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.049305 4576 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:15 crc 
kubenswrapper[4576]: I1203 08:55:15.049314 4576 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.049345 4576 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.049356 4576 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.049369 4576 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.049377 4576 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.049388 4576 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/cfe34f07-1425-4b62-9eb0-70d1b197611c-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.049396 4576 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/cfe34f07-1425-4b62-9eb0-70d1b197611c-host-slash\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.050166 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-ovnkube-script-lib\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.050897 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-ovn-node-metrics-cert\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.071447 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p6v2j\" (UniqueName: \"kubernetes.io/projected/a82ae146-bafe-4be7-b73c-3d3ec9185a7a-kube-api-access-p6v2j\") pod \"ovnkube-node-z5gbz\" (UID: \"a82ae146-bafe-4be7-b73c-3d3ec9185a7a\") " pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.155002 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:15 crc kubenswrapper[4576]: W1203 08:55:15.174616 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda82ae146_bafe_4be7_b73c_3d3ec9185a7a.slice/crio-216ba79390886eb0afada08ece618be21edfca5c6f0dc6995b71711d2a19ba67 WatchSource:0}: Error finding container 216ba79390886eb0afada08ece618be21edfca5c6f0dc6995b71711d2a19ba67: Status 404 returned error can't find the container with id 216ba79390886eb0afada08ece618be21edfca5c6f0dc6995b71711d2a19ba67 Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.317171 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-sv2s4" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.317454 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-sv2s4" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.362658 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-sv2s4" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.559696 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5f9zh_cfe34f07-1425-4b62-9eb0-70d1b197611c/ovn-acl-logging/1.log" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.562994 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5f9zh_cfe34f07-1425-4b62-9eb0-70d1b197611c/ovn-controller/0.log" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.563465 4576 generic.go:334] "Generic (PLEG): container finished" podID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerID="5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0" exitCode=0 Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.563494 4576 generic.go:334] "Generic (PLEG): container finished" podID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerID="eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8" exitCode=0 Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.563506 4576 generic.go:334] "Generic (PLEG): container finished" podID="cfe34f07-1425-4b62-9eb0-70d1b197611c" containerID="62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c" exitCode=0 Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.563564 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" event={"ID":"cfe34f07-1425-4b62-9eb0-70d1b197611c","Type":"ContainerDied","Data":"5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0"} Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.563608 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" event={"ID":"cfe34f07-1425-4b62-9eb0-70d1b197611c","Type":"ContainerDied","Data":"eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8"} Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.563624 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" event={"ID":"cfe34f07-1425-4b62-9eb0-70d1b197611c","Type":"ContainerDied","Data":"62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c"} Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.563640 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" 
event={"ID":"cfe34f07-1425-4b62-9eb0-70d1b197611c","Type":"ContainerDied","Data":"2ea6ab1b6c0d4a382c35bb286754ac5275591e818129b71b6efd6e0cc1310860"} Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.563662 4576 scope.go:117] "RemoveContainer" containerID="58837871b925370f84d8c9465faa146eb92d65c964f3819d2f27135917439036" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.563676 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5f9zh" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.574939 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jbxx2_e2b7eac2-6611-49d0-9da5-f97a3ccc9529/kube-multus/2.log" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.575019 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jbxx2" event={"ID":"e2b7eac2-6611-49d0-9da5-f97a3ccc9529","Type":"ContainerStarted","Data":"0dea9f559e3cec8bd5a6af2aeda86adad8019c8efc811fd1773faccc88866033"} Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.578885 4576 generic.go:334] "Generic (PLEG): container finished" podID="a82ae146-bafe-4be7-b73c-3d3ec9185a7a" containerID="0cf477af99e42317a71ad891a86c4effe413df1ed263a6691676f0aaf57b04af" exitCode=0 Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.579052 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" event={"ID":"a82ae146-bafe-4be7-b73c-3d3ec9185a7a","Type":"ContainerDied","Data":"0cf477af99e42317a71ad891a86c4effe413df1ed263a6691676f0aaf57b04af"} Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.579143 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" event={"ID":"a82ae146-bafe-4be7-b73c-3d3ec9185a7a","Type":"ContainerStarted","Data":"216ba79390886eb0afada08ece618be21edfca5c6f0dc6995b71711d2a19ba67"} Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.594332 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sm8hx" event={"ID":"a2850519-8de4-48ce-a573-e56c2b01cc98","Type":"ContainerStarted","Data":"7aff1286141d2a0db83b13c45e3421617b2a3148a784ec15dceab37e056ff8d2"} Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.628720 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-sm8hx" podStartSLOduration=2.041861668 podStartE2EDuration="4.62870011s" podCreationTimestamp="2025-12-03 08:55:11 +0000 UTC" firstStartedPulling="2025-12-03 08:55:12.52099004 +0000 UTC m=+919.906967064" lastFinishedPulling="2025-12-03 08:55:15.107828522 +0000 UTC m=+922.493805506" observedRunningTime="2025-12-03 08:55:15.628653418 +0000 UTC m=+923.014630412" watchObservedRunningTime="2025-12-03 08:55:15.62870011 +0000 UTC m=+923.014677094" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.661105 4576 scope.go:117] "RemoveContainer" containerID="eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.691104 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-sv2s4" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.700408 4576 scope.go:117] "RemoveContainer" containerID="5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.707042 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-ovn-kubernetes/ovnkube-node-5f9zh"] Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.714125 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-5f9zh"] Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.720734 4576 scope.go:117] "RemoveContainer" containerID="eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.732512 4576 scope.go:117] "RemoveContainer" containerID="62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.746485 4576 scope.go:117] "RemoveContainer" containerID="5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.763790 4576 scope.go:117] "RemoveContainer" containerID="6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.786979 4576 scope.go:117] "RemoveContainer" containerID="dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.816304 4576 scope.go:117] "RemoveContainer" containerID="cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.836880 4576 scope.go:117] "RemoveContainer" containerID="58837871b925370f84d8c9465faa146eb92d65c964f3819d2f27135917439036" Dec 03 08:55:15 crc kubenswrapper[4576]: E1203 08:55:15.838408 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"58837871b925370f84d8c9465faa146eb92d65c964f3819d2f27135917439036\": container with ID starting with 58837871b925370f84d8c9465faa146eb92d65c964f3819d2f27135917439036 not found: ID does not exist" containerID="58837871b925370f84d8c9465faa146eb92d65c964f3819d2f27135917439036" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.838453 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58837871b925370f84d8c9465faa146eb92d65c964f3819d2f27135917439036"} err="failed to get container status \"58837871b925370f84d8c9465faa146eb92d65c964f3819d2f27135917439036\": rpc error: code = NotFound desc = could not find container \"58837871b925370f84d8c9465faa146eb92d65c964f3819d2f27135917439036\": container with ID starting with 58837871b925370f84d8c9465faa146eb92d65c964f3819d2f27135917439036 not found: ID does not exist" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.838486 4576 scope.go:117] "RemoveContainer" containerID="eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704" Dec 03 08:55:15 crc kubenswrapper[4576]: E1203 08:55:15.838986 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704\": container with ID starting with eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704 not found: ID does not exist" containerID="eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.839028 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704"} err="failed to get container status \"eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704\": rpc error: code = NotFound desc = could 
not find container \"eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704\": container with ID starting with eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704 not found: ID does not exist" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.839058 4576 scope.go:117] "RemoveContainer" containerID="5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0" Dec 03 08:55:15 crc kubenswrapper[4576]: E1203 08:55:15.839371 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0\": container with ID starting with 5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0 not found: ID does not exist" containerID="5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.839398 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0"} err="failed to get container status \"5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0\": rpc error: code = NotFound desc = could not find container \"5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0\": container with ID starting with 5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0 not found: ID does not exist" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.839414 4576 scope.go:117] "RemoveContainer" containerID="eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8" Dec 03 08:55:15 crc kubenswrapper[4576]: E1203 08:55:15.839713 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8\": container with ID starting with eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8 not found: ID does not exist" containerID="eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.839743 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8"} err="failed to get container status \"eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8\": rpc error: code = NotFound desc = could not find container \"eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8\": container with ID starting with eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8 not found: ID does not exist" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.839771 4576 scope.go:117] "RemoveContainer" containerID="62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c" Dec 03 08:55:15 crc kubenswrapper[4576]: E1203 08:55:15.840054 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c\": container with ID starting with 62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c not found: ID does not exist" containerID="62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.840079 4576 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c"} err="failed to get container status \"62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c\": rpc error: code = NotFound desc = could not find container \"62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c\": container with ID starting with 62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c not found: ID does not exist" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.840119 4576 scope.go:117] "RemoveContainer" containerID="5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4" Dec 03 08:55:15 crc kubenswrapper[4576]: E1203 08:55:15.840844 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4\": container with ID starting with 5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4 not found: ID does not exist" containerID="5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.840881 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4"} err="failed to get container status \"5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4\": rpc error: code = NotFound desc = could not find container \"5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4\": container with ID starting with 5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4 not found: ID does not exist" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.840899 4576 scope.go:117] "RemoveContainer" containerID="6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97" Dec 03 08:55:15 crc kubenswrapper[4576]: E1203 08:55:15.841140 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97\": container with ID starting with 6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97 not found: ID does not exist" containerID="6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.841181 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97"} err="failed to get container status \"6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97\": rpc error: code = NotFound desc = could not find container \"6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97\": container with ID starting with 6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97 not found: ID does not exist" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.841204 4576 scope.go:117] "RemoveContainer" containerID="dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f" Dec 03 08:55:15 crc kubenswrapper[4576]: E1203 08:55:15.841454 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f\": container with ID starting with dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f not found: ID does not exist" 
containerID="dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.841481 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f"} err="failed to get container status \"dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f\": rpc error: code = NotFound desc = could not find container \"dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f\": container with ID starting with dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f not found: ID does not exist" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.841496 4576 scope.go:117] "RemoveContainer" containerID="cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8" Dec 03 08:55:15 crc kubenswrapper[4576]: E1203 08:55:15.841737 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\": container with ID starting with cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8 not found: ID does not exist" containerID="cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.841776 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8"} err="failed to get container status \"cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\": rpc error: code = NotFound desc = could not find container \"cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\": container with ID starting with cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8 not found: ID does not exist" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.841789 4576 scope.go:117] "RemoveContainer" containerID="58837871b925370f84d8c9465faa146eb92d65c964f3819d2f27135917439036" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.842062 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58837871b925370f84d8c9465faa146eb92d65c964f3819d2f27135917439036"} err="failed to get container status \"58837871b925370f84d8c9465faa146eb92d65c964f3819d2f27135917439036\": rpc error: code = NotFound desc = could not find container \"58837871b925370f84d8c9465faa146eb92d65c964f3819d2f27135917439036\": container with ID starting with 58837871b925370f84d8c9465faa146eb92d65c964f3819d2f27135917439036 not found: ID does not exist" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.842085 4576 scope.go:117] "RemoveContainer" containerID="eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.842429 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704"} err="failed to get container status \"eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704\": rpc error: code = NotFound desc = could not find container \"eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704\": container with ID starting with eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704 not found: ID does not exist" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.842462 4576 scope.go:117] "RemoveContainer" 
containerID="5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.842755 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0"} err="failed to get container status \"5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0\": rpc error: code = NotFound desc = could not find container \"5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0\": container with ID starting with 5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0 not found: ID does not exist" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.842773 4576 scope.go:117] "RemoveContainer" containerID="eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.843036 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8"} err="failed to get container status \"eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8\": rpc error: code = NotFound desc = could not find container \"eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8\": container with ID starting with eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8 not found: ID does not exist" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.843058 4576 scope.go:117] "RemoveContainer" containerID="62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.843268 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c"} err="failed to get container status \"62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c\": rpc error: code = NotFound desc = could not find container \"62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c\": container with ID starting with 62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c not found: ID does not exist" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.843288 4576 scope.go:117] "RemoveContainer" containerID="5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.843537 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4"} err="failed to get container status \"5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4\": rpc error: code = NotFound desc = could not find container \"5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4\": container with ID starting with 5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4 not found: ID does not exist" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.843555 4576 scope.go:117] "RemoveContainer" containerID="6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.843848 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97"} err="failed to get container status \"6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97\": rpc error: code = NotFound desc = could not find 
container \"6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97\": container with ID starting with 6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97 not found: ID does not exist" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.843867 4576 scope.go:117] "RemoveContainer" containerID="dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.844129 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f"} err="failed to get container status \"dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f\": rpc error: code = NotFound desc = could not find container \"dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f\": container with ID starting with dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f not found: ID does not exist" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.844149 4576 scope.go:117] "RemoveContainer" containerID="cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.844399 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8"} err="failed to get container status \"cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\": rpc error: code = NotFound desc = could not find container \"cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\": container with ID starting with cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8 not found: ID does not exist" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.844415 4576 scope.go:117] "RemoveContainer" containerID="58837871b925370f84d8c9465faa146eb92d65c964f3819d2f27135917439036" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.844716 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58837871b925370f84d8c9465faa146eb92d65c964f3819d2f27135917439036"} err="failed to get container status \"58837871b925370f84d8c9465faa146eb92d65c964f3819d2f27135917439036\": rpc error: code = NotFound desc = could not find container \"58837871b925370f84d8c9465faa146eb92d65c964f3819d2f27135917439036\": container with ID starting with 58837871b925370f84d8c9465faa146eb92d65c964f3819d2f27135917439036 not found: ID does not exist" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.844732 4576 scope.go:117] "RemoveContainer" containerID="eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.844947 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704"} err="failed to get container status \"eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704\": rpc error: code = NotFound desc = could not find container \"eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704\": container with ID starting with eaee7b5be46e2890be92b4e0fbc3407d81e0d0934c5fffa8517df8784a2f4704 not found: ID does not exist" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.844968 4576 scope.go:117] "RemoveContainer" containerID="5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.845175 4576 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0"} err="failed to get container status \"5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0\": rpc error: code = NotFound desc = could not find container \"5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0\": container with ID starting with 5e93ef03812c3077006ce8a6299309dcca1cf5aeaa564775ef6a9563b8e3eae0 not found: ID does not exist" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.845203 4576 scope.go:117] "RemoveContainer" containerID="eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.845484 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8"} err="failed to get container status \"eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8\": rpc error: code = NotFound desc = could not find container \"eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8\": container with ID starting with eba2116cf6fe5ffd8fca9d4a3e7af4bb4f59db95f95653082b6c1517bf058fe8 not found: ID does not exist" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.845501 4576 scope.go:117] "RemoveContainer" containerID="62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.845775 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c"} err="failed to get container status \"62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c\": rpc error: code = NotFound desc = could not find container \"62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c\": container with ID starting with 62fddc3831eff97088d36bcc325ddb7ce89bfa1f8302816406b5cf4c25021d9c not found: ID does not exist" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.845796 4576 scope.go:117] "RemoveContainer" containerID="5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.846065 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4"} err="failed to get container status \"5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4\": rpc error: code = NotFound desc = could not find container \"5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4\": container with ID starting with 5779cab2a9e0143fb85e6b6009461b7887f9c973a5d61ab8182f6daf9b7f78d4 not found: ID does not exist" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.846081 4576 scope.go:117] "RemoveContainer" containerID="6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.846993 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97"} err="failed to get container status \"6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97\": rpc error: code = NotFound desc = could not find container \"6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97\": container with ID starting with 
6c4cfa29a3bc3f063aa99353bf881cb95215ac6319eecf941740060d8f730b97 not found: ID does not exist" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.847007 4576 scope.go:117] "RemoveContainer" containerID="dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.847216 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f"} err="failed to get container status \"dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f\": rpc error: code = NotFound desc = could not find container \"dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f\": container with ID starting with dfd38f0c8feabf64818850ab37a5e2a9cf9948e1c0781385ec358c720c022a1f not found: ID does not exist" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.847247 4576 scope.go:117] "RemoveContainer" containerID="cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8" Dec 03 08:55:15 crc kubenswrapper[4576]: I1203 08:55:15.847422 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8"} err="failed to get container status \"cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\": rpc error: code = NotFound desc = could not find container \"cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8\": container with ID starting with cbaea33479978458160218202af06ed173508a535d1d21b341cdba2c857433f8 not found: ID does not exist" Dec 03 08:55:16 crc kubenswrapper[4576]: I1203 08:55:16.605475 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" event={"ID":"a82ae146-bafe-4be7-b73c-3d3ec9185a7a","Type":"ContainerStarted","Data":"f2a52cb4088638851ca6a41946971aad3116a8333e321959fe00100b3bc34bfc"} Dec 03 08:55:16 crc kubenswrapper[4576]: I1203 08:55:16.607001 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" event={"ID":"a82ae146-bafe-4be7-b73c-3d3ec9185a7a","Type":"ContainerStarted","Data":"6455eb2f177de0de6ea89961efd40bd1fcd60bcb1a8150e0749fdb2dd49d7d3d"} Dec 03 08:55:16 crc kubenswrapper[4576]: I1203 08:55:16.607109 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" event={"ID":"a82ae146-bafe-4be7-b73c-3d3ec9185a7a","Type":"ContainerStarted","Data":"3b7371f210d47bd038c584e4b0c15c5ffd3bec23671bda6d820ee46fe78b9be8"} Dec 03 08:55:16 crc kubenswrapper[4576]: I1203 08:55:16.607204 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" event={"ID":"a82ae146-bafe-4be7-b73c-3d3ec9185a7a","Type":"ContainerStarted","Data":"745b1cb92837a46a9965722c19d4070666b0c91feeb3d45a325afce86c6c0382"} Dec 03 08:55:16 crc kubenswrapper[4576]: I1203 08:55:16.607288 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" event={"ID":"a82ae146-bafe-4be7-b73c-3d3ec9185a7a","Type":"ContainerStarted","Data":"9908a6c24e39d044f8ddaf4263669f7a6bb49f55277811404ce35f5f74c326d2"} Dec 03 08:55:16 crc kubenswrapper[4576]: I1203 08:55:16.607375 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" 
event={"ID":"a82ae146-bafe-4be7-b73c-3d3ec9185a7a","Type":"ContainerStarted","Data":"3be21d9c29b9ae08a6d9b9e71dce26ef758e0567bccc8e9ea02b11b704672665"} Dec 03 08:55:17 crc kubenswrapper[4576]: I1203 08:55:17.686994 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cfe34f07-1425-4b62-9eb0-70d1b197611c" path="/var/lib/kubelet/pods/cfe34f07-1425-4b62-9eb0-70d1b197611c/volumes" Dec 03 08:55:17 crc kubenswrapper[4576]: I1203 08:55:17.774286 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-sv2s4"] Dec 03 08:55:17 crc kubenswrapper[4576]: I1203 08:55:17.775114 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-sv2s4" podUID="0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5" containerName="registry-server" containerID="cri-o://1943df8335351f2d58676e84f379026e26664be84c1d3793079da8982ae1c919" gracePeriod=2 Dec 03 08:55:18 crc kubenswrapper[4576]: I1203 08:55:18.033388 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sv2s4" Dec 03 08:55:18 crc kubenswrapper[4576]: I1203 08:55:18.190888 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9ks9q\" (UniqueName: \"kubernetes.io/projected/0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5-kube-api-access-9ks9q\") pod \"0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5\" (UID: \"0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5\") " Dec 03 08:55:18 crc kubenswrapper[4576]: I1203 08:55:18.191838 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5-catalog-content\") pod \"0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5\" (UID: \"0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5\") " Dec 03 08:55:18 crc kubenswrapper[4576]: I1203 08:55:18.192010 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5-utilities\") pod \"0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5\" (UID: \"0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5\") " Dec 03 08:55:18 crc kubenswrapper[4576]: I1203 08:55:18.193071 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5-utilities" (OuterVolumeSpecName: "utilities") pod "0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5" (UID: "0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:55:18 crc kubenswrapper[4576]: I1203 08:55:18.204296 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5-kube-api-access-9ks9q" (OuterVolumeSpecName: "kube-api-access-9ks9q") pod "0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5" (UID: "0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5"). InnerVolumeSpecName "kube-api-access-9ks9q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:55:18 crc kubenswrapper[4576]: I1203 08:55:18.220664 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5" (UID: "0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:55:18 crc kubenswrapper[4576]: I1203 08:55:18.294038 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:18 crc kubenswrapper[4576]: I1203 08:55:18.294098 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9ks9q\" (UniqueName: \"kubernetes.io/projected/0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5-kube-api-access-9ks9q\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:18 crc kubenswrapper[4576]: I1203 08:55:18.294112 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:18 crc kubenswrapper[4576]: I1203 08:55:18.622056 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" event={"ID":"a82ae146-bafe-4be7-b73c-3d3ec9185a7a","Type":"ContainerStarted","Data":"ae2b13bcd290a5041c162acd4737c26b7eac54fe9c513853518492e44ce2f63b"} Dec 03 08:55:18 crc kubenswrapper[4576]: I1203 08:55:18.624888 4576 generic.go:334] "Generic (PLEG): container finished" podID="0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5" containerID="1943df8335351f2d58676e84f379026e26664be84c1d3793079da8982ae1c919" exitCode=0 Dec 03 08:55:18 crc kubenswrapper[4576]: I1203 08:55:18.624971 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sv2s4" Dec 03 08:55:18 crc kubenswrapper[4576]: I1203 08:55:18.624984 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sv2s4" event={"ID":"0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5","Type":"ContainerDied","Data":"1943df8335351f2d58676e84f379026e26664be84c1d3793079da8982ae1c919"} Dec 03 08:55:18 crc kubenswrapper[4576]: I1203 08:55:18.625428 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sv2s4" event={"ID":"0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5","Type":"ContainerDied","Data":"7bb5995e29acaf5ca66dd353cb0dbde4dc89a14a0f0939511d4b7c9d4ed3e9e0"} Dec 03 08:55:18 crc kubenswrapper[4576]: I1203 08:55:18.625466 4576 scope.go:117] "RemoveContainer" containerID="1943df8335351f2d58676e84f379026e26664be84c1d3793079da8982ae1c919" Dec 03 08:55:18 crc kubenswrapper[4576]: I1203 08:55:18.642188 4576 scope.go:117] "RemoveContainer" containerID="6a2f1c3c03b423ace83b4d6ce4207f4e565f4f06e4df27b39b13e5a630e94f59" Dec 03 08:55:18 crc kubenswrapper[4576]: I1203 08:55:18.654245 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-sv2s4"] Dec 03 08:55:18 crc kubenswrapper[4576]: I1203 08:55:18.658856 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-sv2s4"] Dec 03 08:55:18 crc kubenswrapper[4576]: I1203 08:55:18.668171 4576 scope.go:117] "RemoveContainer" containerID="882a90b2ad3168adc10d56551ebf35f5f1a34cc283e6daa76a60c14df8d61be5" Dec 03 08:55:18 crc kubenswrapper[4576]: I1203 08:55:18.683580 4576 scope.go:117] "RemoveContainer" containerID="1943df8335351f2d58676e84f379026e26664be84c1d3793079da8982ae1c919" Dec 03 08:55:18 crc kubenswrapper[4576]: E1203 08:55:18.684300 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"1943df8335351f2d58676e84f379026e26664be84c1d3793079da8982ae1c919\": container with ID starting with 1943df8335351f2d58676e84f379026e26664be84c1d3793079da8982ae1c919 not found: ID does not exist" containerID="1943df8335351f2d58676e84f379026e26664be84c1d3793079da8982ae1c919" Dec 03 08:55:18 crc kubenswrapper[4576]: I1203 08:55:18.684356 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1943df8335351f2d58676e84f379026e26664be84c1d3793079da8982ae1c919"} err="failed to get container status \"1943df8335351f2d58676e84f379026e26664be84c1d3793079da8982ae1c919\": rpc error: code = NotFound desc = could not find container \"1943df8335351f2d58676e84f379026e26664be84c1d3793079da8982ae1c919\": container with ID starting with 1943df8335351f2d58676e84f379026e26664be84c1d3793079da8982ae1c919 not found: ID does not exist" Dec 03 08:55:18 crc kubenswrapper[4576]: I1203 08:55:18.684393 4576 scope.go:117] "RemoveContainer" containerID="6a2f1c3c03b423ace83b4d6ce4207f4e565f4f06e4df27b39b13e5a630e94f59" Dec 03 08:55:18 crc kubenswrapper[4576]: E1203 08:55:18.684750 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6a2f1c3c03b423ace83b4d6ce4207f4e565f4f06e4df27b39b13e5a630e94f59\": container with ID starting with 6a2f1c3c03b423ace83b4d6ce4207f4e565f4f06e4df27b39b13e5a630e94f59 not found: ID does not exist" containerID="6a2f1c3c03b423ace83b4d6ce4207f4e565f4f06e4df27b39b13e5a630e94f59" Dec 03 08:55:18 crc kubenswrapper[4576]: I1203 08:55:18.684780 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a2f1c3c03b423ace83b4d6ce4207f4e565f4f06e4df27b39b13e5a630e94f59"} err="failed to get container status \"6a2f1c3c03b423ace83b4d6ce4207f4e565f4f06e4df27b39b13e5a630e94f59\": rpc error: code = NotFound desc = could not find container \"6a2f1c3c03b423ace83b4d6ce4207f4e565f4f06e4df27b39b13e5a630e94f59\": container with ID starting with 6a2f1c3c03b423ace83b4d6ce4207f4e565f4f06e4df27b39b13e5a630e94f59 not found: ID does not exist" Dec 03 08:55:18 crc kubenswrapper[4576]: I1203 08:55:18.684801 4576 scope.go:117] "RemoveContainer" containerID="882a90b2ad3168adc10d56551ebf35f5f1a34cc283e6daa76a60c14df8d61be5" Dec 03 08:55:18 crc kubenswrapper[4576]: E1203 08:55:18.685264 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"882a90b2ad3168adc10d56551ebf35f5f1a34cc283e6daa76a60c14df8d61be5\": container with ID starting with 882a90b2ad3168adc10d56551ebf35f5f1a34cc283e6daa76a60c14df8d61be5 not found: ID does not exist" containerID="882a90b2ad3168adc10d56551ebf35f5f1a34cc283e6daa76a60c14df8d61be5" Dec 03 08:55:18 crc kubenswrapper[4576]: I1203 08:55:18.685307 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"882a90b2ad3168adc10d56551ebf35f5f1a34cc283e6daa76a60c14df8d61be5"} err="failed to get container status \"882a90b2ad3168adc10d56551ebf35f5f1a34cc283e6daa76a60c14df8d61be5\": rpc error: code = NotFound desc = could not find container \"882a90b2ad3168adc10d56551ebf35f5f1a34cc283e6daa76a60c14df8d61be5\": container with ID starting with 882a90b2ad3168adc10d56551ebf35f5f1a34cc283e6daa76a60c14df8d61be5 not found: ID does not exist" Dec 03 08:55:19 crc kubenswrapper[4576]: I1203 08:55:19.689317 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5" 
path="/var/lib/kubelet/pods/0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5/volumes" Dec 03 08:55:21 crc kubenswrapper[4576]: I1203 08:55:21.652226 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" event={"ID":"a82ae146-bafe-4be7-b73c-3d3ec9185a7a","Type":"ContainerStarted","Data":"e1689da7082353394471b42cdd5b9ea33c1e7723f51b6688ca5b78ec0377ff78"} Dec 03 08:55:21 crc kubenswrapper[4576]: I1203 08:55:21.652827 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:21 crc kubenswrapper[4576]: I1203 08:55:21.652855 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:21 crc kubenswrapper[4576]: I1203 08:55:21.652873 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:21 crc kubenswrapper[4576]: I1203 08:55:21.693195 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:21 crc kubenswrapper[4576]: I1203 08:55:21.699973 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" podStartSLOduration=7.699945714 podStartE2EDuration="7.699945714s" podCreationTimestamp="2025-12-03 08:55:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:55:21.693116296 +0000 UTC m=+929.079093320" watchObservedRunningTime="2025-12-03 08:55:21.699945714 +0000 UTC m=+929.085922718" Dec 03 08:55:21 crc kubenswrapper[4576]: I1203 08:55:21.715127 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-sm8hx" Dec 03 08:55:21 crc kubenswrapper[4576]: I1203 08:55:21.715504 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:21 crc kubenswrapper[4576]: I1203 08:55:21.715600 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-sm8hx" Dec 03 08:55:21 crc kubenswrapper[4576]: I1203 08:55:21.769980 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-sm8hx" Dec 03 08:55:22 crc kubenswrapper[4576]: I1203 08:55:22.709912 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-sm8hx" Dec 03 08:55:22 crc kubenswrapper[4576]: I1203 08:55:22.764976 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-sm8hx"] Dec 03 08:55:24 crc kubenswrapper[4576]: I1203 08:55:24.680655 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-sm8hx" podUID="a2850519-8de4-48ce-a573-e56c2b01cc98" containerName="registry-server" containerID="cri-o://7aff1286141d2a0db83b13c45e3421617b2a3148a784ec15dceab37e056ff8d2" gracePeriod=2 Dec 03 08:55:25 crc kubenswrapper[4576]: I1203 08:55:25.687762 4576 generic.go:334] "Generic (PLEG): container finished" podID="a2850519-8de4-48ce-a573-e56c2b01cc98" containerID="7aff1286141d2a0db83b13c45e3421617b2a3148a784ec15dceab37e056ff8d2" exitCode=0 Dec 03 08:55:25 crc kubenswrapper[4576]: I1203 08:55:25.687839 4576 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-marketplace/community-operators-sm8hx" event={"ID":"a2850519-8de4-48ce-a573-e56c2b01cc98","Type":"ContainerDied","Data":"7aff1286141d2a0db83b13c45e3421617b2a3148a784ec15dceab37e056ff8d2"} Dec 03 08:55:26 crc kubenswrapper[4576]: I1203 08:55:26.164115 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-sm8hx" Dec 03 08:55:26 crc kubenswrapper[4576]: I1203 08:55:26.340712 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2850519-8de4-48ce-a573-e56c2b01cc98-utilities\") pod \"a2850519-8de4-48ce-a573-e56c2b01cc98\" (UID: \"a2850519-8de4-48ce-a573-e56c2b01cc98\") " Dec 03 08:55:26 crc kubenswrapper[4576]: I1203 08:55:26.340963 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cm4mx\" (UniqueName: \"kubernetes.io/projected/a2850519-8de4-48ce-a573-e56c2b01cc98-kube-api-access-cm4mx\") pod \"a2850519-8de4-48ce-a573-e56c2b01cc98\" (UID: \"a2850519-8de4-48ce-a573-e56c2b01cc98\") " Dec 03 08:55:26 crc kubenswrapper[4576]: I1203 08:55:26.341021 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2850519-8de4-48ce-a573-e56c2b01cc98-catalog-content\") pod \"a2850519-8de4-48ce-a573-e56c2b01cc98\" (UID: \"a2850519-8de4-48ce-a573-e56c2b01cc98\") " Dec 03 08:55:26 crc kubenswrapper[4576]: I1203 08:55:26.342952 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a2850519-8de4-48ce-a573-e56c2b01cc98-utilities" (OuterVolumeSpecName: "utilities") pod "a2850519-8de4-48ce-a573-e56c2b01cc98" (UID: "a2850519-8de4-48ce-a573-e56c2b01cc98"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:55:26 crc kubenswrapper[4576]: I1203 08:55:26.347308 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2850519-8de4-48ce-a573-e56c2b01cc98-kube-api-access-cm4mx" (OuterVolumeSpecName: "kube-api-access-cm4mx") pod "a2850519-8de4-48ce-a573-e56c2b01cc98" (UID: "a2850519-8de4-48ce-a573-e56c2b01cc98"). InnerVolumeSpecName "kube-api-access-cm4mx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:55:26 crc kubenswrapper[4576]: I1203 08:55:26.395204 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a2850519-8de4-48ce-a573-e56c2b01cc98-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a2850519-8de4-48ce-a573-e56c2b01cc98" (UID: "a2850519-8de4-48ce-a573-e56c2b01cc98"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:55:26 crc kubenswrapper[4576]: I1203 08:55:26.442396 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cm4mx\" (UniqueName: \"kubernetes.io/projected/a2850519-8de4-48ce-a573-e56c2b01cc98-kube-api-access-cm4mx\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:26 crc kubenswrapper[4576]: I1203 08:55:26.442447 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2850519-8de4-48ce-a573-e56c2b01cc98-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:26 crc kubenswrapper[4576]: I1203 08:55:26.442462 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2850519-8de4-48ce-a573-e56c2b01cc98-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:26 crc kubenswrapper[4576]: I1203 08:55:26.696893 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sm8hx" event={"ID":"a2850519-8de4-48ce-a573-e56c2b01cc98","Type":"ContainerDied","Data":"cd1ed5df1dedc4b57089612d0c2c5b2fa6f7446aeddd7a08b8f749a40f40134f"} Dec 03 08:55:26 crc kubenswrapper[4576]: I1203 08:55:26.696961 4576 scope.go:117] "RemoveContainer" containerID="7aff1286141d2a0db83b13c45e3421617b2a3148a784ec15dceab37e056ff8d2" Dec 03 08:55:26 crc kubenswrapper[4576]: I1203 08:55:26.698593 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-sm8hx" Dec 03 08:55:26 crc kubenswrapper[4576]: I1203 08:55:26.714227 4576 scope.go:117] "RemoveContainer" containerID="4454e15692e71a7e4d22c182e1c84ecc5f3ffec4a27a04502b481a99bb2b9d7c" Dec 03 08:55:26 crc kubenswrapper[4576]: I1203 08:55:26.734477 4576 scope.go:117] "RemoveContainer" containerID="d3f64e2b6280dbed186d8483a91ac884d45e27f0cc9bc0300f344a4d6edcbfb8" Dec 03 08:55:26 crc kubenswrapper[4576]: I1203 08:55:26.744435 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-sm8hx"] Dec 03 08:55:26 crc kubenswrapper[4576]: I1203 08:55:26.746142 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-sm8hx"] Dec 03 08:55:27 crc kubenswrapper[4576]: I1203 08:55:27.691840 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2850519-8de4-48ce-a573-e56c2b01cc98" path="/var/lib/kubelet/pods/a2850519-8de4-48ce-a573-e56c2b01cc98/volumes" Dec 03 08:55:38 crc kubenswrapper[4576]: I1203 08:55:38.932514 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-l8mqv"] Dec 03 08:55:38 crc kubenswrapper[4576]: E1203 08:55:38.933485 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5" containerName="extract-content" Dec 03 08:55:38 crc kubenswrapper[4576]: I1203 08:55:38.933504 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5" containerName="extract-content" Dec 03 08:55:38 crc kubenswrapper[4576]: E1203 08:55:38.933553 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2850519-8de4-48ce-a573-e56c2b01cc98" containerName="extract-content" Dec 03 08:55:38 crc kubenswrapper[4576]: I1203 08:55:38.933563 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2850519-8de4-48ce-a573-e56c2b01cc98" containerName="extract-content" Dec 03 08:55:38 crc kubenswrapper[4576]: E1203 08:55:38.933578 4576 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5" containerName="extract-utilities" Dec 03 08:55:38 crc kubenswrapper[4576]: I1203 08:55:38.933586 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5" containerName="extract-utilities" Dec 03 08:55:38 crc kubenswrapper[4576]: E1203 08:55:38.933596 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2850519-8de4-48ce-a573-e56c2b01cc98" containerName="registry-server" Dec 03 08:55:38 crc kubenswrapper[4576]: I1203 08:55:38.933603 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2850519-8de4-48ce-a573-e56c2b01cc98" containerName="registry-server" Dec 03 08:55:38 crc kubenswrapper[4576]: E1203 08:55:38.933616 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2850519-8de4-48ce-a573-e56c2b01cc98" containerName="extract-utilities" Dec 03 08:55:38 crc kubenswrapper[4576]: I1203 08:55:38.933623 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2850519-8de4-48ce-a573-e56c2b01cc98" containerName="extract-utilities" Dec 03 08:55:38 crc kubenswrapper[4576]: E1203 08:55:38.933633 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5" containerName="registry-server" Dec 03 08:55:38 crc kubenswrapper[4576]: I1203 08:55:38.933640 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5" containerName="registry-server" Dec 03 08:55:38 crc kubenswrapper[4576]: I1203 08:55:38.933781 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2850519-8de4-48ce-a573-e56c2b01cc98" containerName="registry-server" Dec 03 08:55:38 crc kubenswrapper[4576]: I1203 08:55:38.933809 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a8f635f-fbdd-4cd7-b7f1-f5740fb7deb5" containerName="registry-server" Dec 03 08:55:38 crc kubenswrapper[4576]: I1203 08:55:38.935004 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-l8mqv" Dec 03 08:55:38 crc kubenswrapper[4576]: I1203 08:55:38.984233 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-l8mqv"] Dec 03 08:55:39 crc kubenswrapper[4576]: I1203 08:55:39.007119 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/79b48d22-a968-422f-8bbb-5e8d82f63138-catalog-content\") pod \"redhat-operators-l8mqv\" (UID: \"79b48d22-a968-422f-8bbb-5e8d82f63138\") " pod="openshift-marketplace/redhat-operators-l8mqv" Dec 03 08:55:39 crc kubenswrapper[4576]: I1203 08:55:39.007219 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79b48d22-a968-422f-8bbb-5e8d82f63138-utilities\") pod \"redhat-operators-l8mqv\" (UID: \"79b48d22-a968-422f-8bbb-5e8d82f63138\") " pod="openshift-marketplace/redhat-operators-l8mqv" Dec 03 08:55:39 crc kubenswrapper[4576]: I1203 08:55:39.007269 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4qwht\" (UniqueName: \"kubernetes.io/projected/79b48d22-a968-422f-8bbb-5e8d82f63138-kube-api-access-4qwht\") pod \"redhat-operators-l8mqv\" (UID: \"79b48d22-a968-422f-8bbb-5e8d82f63138\") " pod="openshift-marketplace/redhat-operators-l8mqv" Dec 03 08:55:39 crc kubenswrapper[4576]: I1203 08:55:39.108346 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79b48d22-a968-422f-8bbb-5e8d82f63138-utilities\") pod \"redhat-operators-l8mqv\" (UID: \"79b48d22-a968-422f-8bbb-5e8d82f63138\") " pod="openshift-marketplace/redhat-operators-l8mqv" Dec 03 08:55:39 crc kubenswrapper[4576]: I1203 08:55:39.108443 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4qwht\" (UniqueName: \"kubernetes.io/projected/79b48d22-a968-422f-8bbb-5e8d82f63138-kube-api-access-4qwht\") pod \"redhat-operators-l8mqv\" (UID: \"79b48d22-a968-422f-8bbb-5e8d82f63138\") " pod="openshift-marketplace/redhat-operators-l8mqv" Dec 03 08:55:39 crc kubenswrapper[4576]: I1203 08:55:39.108503 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/79b48d22-a968-422f-8bbb-5e8d82f63138-catalog-content\") pod \"redhat-operators-l8mqv\" (UID: \"79b48d22-a968-422f-8bbb-5e8d82f63138\") " pod="openshift-marketplace/redhat-operators-l8mqv" Dec 03 08:55:39 crc kubenswrapper[4576]: I1203 08:55:39.108957 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79b48d22-a968-422f-8bbb-5e8d82f63138-utilities\") pod \"redhat-operators-l8mqv\" (UID: \"79b48d22-a968-422f-8bbb-5e8d82f63138\") " pod="openshift-marketplace/redhat-operators-l8mqv" Dec 03 08:55:39 crc kubenswrapper[4576]: I1203 08:55:39.109106 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/79b48d22-a968-422f-8bbb-5e8d82f63138-catalog-content\") pod \"redhat-operators-l8mqv\" (UID: \"79b48d22-a968-422f-8bbb-5e8d82f63138\") " pod="openshift-marketplace/redhat-operators-l8mqv" Dec 03 08:55:39 crc kubenswrapper[4576]: I1203 08:55:39.139555 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-4qwht\" (UniqueName: \"kubernetes.io/projected/79b48d22-a968-422f-8bbb-5e8d82f63138-kube-api-access-4qwht\") pod \"redhat-operators-l8mqv\" (UID: \"79b48d22-a968-422f-8bbb-5e8d82f63138\") " pod="openshift-marketplace/redhat-operators-l8mqv" Dec 03 08:55:39 crc kubenswrapper[4576]: I1203 08:55:39.252072 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-l8mqv" Dec 03 08:55:39 crc kubenswrapper[4576]: I1203 08:55:39.475386 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-l8mqv"] Dec 03 08:55:39 crc kubenswrapper[4576]: I1203 08:55:39.680717 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:55:39 crc kubenswrapper[4576]: I1203 08:55:39.681060 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:55:39 crc kubenswrapper[4576]: I1203 08:55:39.787889 4576 generic.go:334] "Generic (PLEG): container finished" podID="79b48d22-a968-422f-8bbb-5e8d82f63138" containerID="085c48ec78d9c6874b8aae2793124a9b1603588e4e6345f77697409eb16fa5c1" exitCode=0 Dec 03 08:55:39 crc kubenswrapper[4576]: I1203 08:55:39.787968 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l8mqv" event={"ID":"79b48d22-a968-422f-8bbb-5e8d82f63138","Type":"ContainerDied","Data":"085c48ec78d9c6874b8aae2793124a9b1603588e4e6345f77697409eb16fa5c1"} Dec 03 08:55:39 crc kubenswrapper[4576]: I1203 08:55:39.788044 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l8mqv" event={"ID":"79b48d22-a968-422f-8bbb-5e8d82f63138","Type":"ContainerStarted","Data":"47293fc70505b1944ba7be2c4675db37cde20877368acf6acddf35ea981823df"} Dec 03 08:55:40 crc kubenswrapper[4576]: I1203 08:55:40.805345 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l8mqv" event={"ID":"79b48d22-a968-422f-8bbb-5e8d82f63138","Type":"ContainerStarted","Data":"aa2d1ade663ded7b46f048c4a189f74952038db0b2393a5392003c9adafd1635"} Dec 03 08:55:41 crc kubenswrapper[4576]: I1203 08:55:41.817691 4576 generic.go:334] "Generic (PLEG): container finished" podID="79b48d22-a968-422f-8bbb-5e8d82f63138" containerID="aa2d1ade663ded7b46f048c4a189f74952038db0b2393a5392003c9adafd1635" exitCode=0 Dec 03 08:55:41 crc kubenswrapper[4576]: I1203 08:55:41.817765 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l8mqv" event={"ID":"79b48d22-a968-422f-8bbb-5e8d82f63138","Type":"ContainerDied","Data":"aa2d1ade663ded7b46f048c4a189f74952038db0b2393a5392003c9adafd1635"} Dec 03 08:55:42 crc kubenswrapper[4576]: I1203 08:55:42.841723 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l8mqv" event={"ID":"79b48d22-a968-422f-8bbb-5e8d82f63138","Type":"ContainerStarted","Data":"182d764e038514cb8157837d35514b7ef1512ab4e6c49fbfca813950de151db5"} Dec 03 08:55:42 crc kubenswrapper[4576]: I1203 08:55:42.867337 4576 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-l8mqv" podStartSLOduration=2.299535412 podStartE2EDuration="4.867301113s" podCreationTimestamp="2025-12-03 08:55:38 +0000 UTC" firstStartedPulling="2025-12-03 08:55:39.789568139 +0000 UTC m=+947.175545123" lastFinishedPulling="2025-12-03 08:55:42.35733384 +0000 UTC m=+949.743310824" observedRunningTime="2025-12-03 08:55:42.863069457 +0000 UTC m=+950.249046441" watchObservedRunningTime="2025-12-03 08:55:42.867301113 +0000 UTC m=+950.253278087" Dec 03 08:55:45 crc kubenswrapper[4576]: I1203 08:55:45.222330 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-z5gbz" Dec 03 08:55:49 crc kubenswrapper[4576]: I1203 08:55:49.253238 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-l8mqv" Dec 03 08:55:49 crc kubenswrapper[4576]: I1203 08:55:49.253915 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-l8mqv" Dec 03 08:55:49 crc kubenswrapper[4576]: I1203 08:55:49.307668 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-l8mqv" Dec 03 08:55:49 crc kubenswrapper[4576]: I1203 08:55:49.947793 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-l8mqv" Dec 03 08:55:50 crc kubenswrapper[4576]: I1203 08:55:50.011888 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-l8mqv"] Dec 03 08:55:51 crc kubenswrapper[4576]: I1203 08:55:51.895628 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-l8mqv" podUID="79b48d22-a968-422f-8bbb-5e8d82f63138" containerName="registry-server" containerID="cri-o://182d764e038514cb8157837d35514b7ef1512ab4e6c49fbfca813950de151db5" gracePeriod=2 Dec 03 08:55:52 crc kubenswrapper[4576]: I1203 08:55:52.616824 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-pxhvn"] Dec 03 08:55:52 crc kubenswrapper[4576]: I1203 08:55:52.617245 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-pxhvn" podUID="140e6a9b-4403-44d7-a0f3-39b6a96c7cd7" containerName="registry-server" containerID="cri-o://ea7900eb5c499a7b7e9a8cb20c21167c1feddfbb14f2e471d7a14d2e6ce13541" gracePeriod=30 Dec 03 08:55:52 crc kubenswrapper[4576]: I1203 08:55:52.628932 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dtm22"] Dec 03 08:55:52 crc kubenswrapper[4576]: I1203 08:55:52.629217 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-dtm22" podUID="76c222ad-bbdf-40e2-ba6a-d30820ca0a74" containerName="registry-server" containerID="cri-o://3adfccc39aa89d8ebb4af533fd81cb6639ad70c9132e0d2afb8a0153b37df0ea" gracePeriod=30 Dec 03 08:55:52 crc kubenswrapper[4576]: I1203 08:55:52.643898 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-jf9sv"] Dec 03 08:55:52 crc kubenswrapper[4576]: I1203 08:55:52.644099 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-jf9sv" 
podUID="1d84683d-c810-4ef4-bd1c-6b17ed4c135d" containerName="marketplace-operator" containerID="cri-o://e591c1e4fce29e6f5433bc3f64ee9d95dea400671cf523cea7c19034dac59dff" gracePeriod=30 Dec 03 08:55:52 crc kubenswrapper[4576]: I1203 08:55:52.655681 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nwkqd"] Dec 03 08:55:52 crc kubenswrapper[4576]: I1203 08:55:52.655903 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-nwkqd" podUID="c78abb51-1399-44a2-8de8-16b060a40d50" containerName="registry-server" containerID="cri-o://c22378675cd8b7c658cd47a87fda63ca96313f86ed62dc8d3df2d199b006997b" gracePeriod=30 Dec 03 08:55:52 crc kubenswrapper[4576]: I1203 08:55:52.668968 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6pv5k"] Dec 03 08:55:52 crc kubenswrapper[4576]: I1203 08:55:52.669239 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-6pv5k" podUID="774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7" containerName="registry-server" containerID="cri-o://f6635674ba75a2e2f5dbb475f9cf7902e8b1ab78f58eeae704e0397bcea53dfb" gracePeriod=30 Dec 03 08:55:52 crc kubenswrapper[4576]: I1203 08:55:52.686152 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-j26qz"] Dec 03 08:55:52 crc kubenswrapper[4576]: I1203 08:55:52.686839 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-j26qz" Dec 03 08:55:52 crc kubenswrapper[4576]: I1203 08:55:52.697187 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/c981f304-77aa-443d-8af7-3d665a32e754-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-j26qz\" (UID: \"c981f304-77aa-443d-8af7-3d665a32e754\") " pod="openshift-marketplace/marketplace-operator-79b997595-j26qz" Dec 03 08:55:52 crc kubenswrapper[4576]: I1203 08:55:52.697248 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v2fw7\" (UniqueName: \"kubernetes.io/projected/c981f304-77aa-443d-8af7-3d665a32e754-kube-api-access-v2fw7\") pod \"marketplace-operator-79b997595-j26qz\" (UID: \"c981f304-77aa-443d-8af7-3d665a32e754\") " pod="openshift-marketplace/marketplace-operator-79b997595-j26qz" Dec 03 08:55:52 crc kubenswrapper[4576]: I1203 08:55:52.697279 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c981f304-77aa-443d-8af7-3d665a32e754-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-j26qz\" (UID: \"c981f304-77aa-443d-8af7-3d665a32e754\") " pod="openshift-marketplace/marketplace-operator-79b997595-j26qz" Dec 03 08:55:52 crc kubenswrapper[4576]: I1203 08:55:52.701234 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-j26qz"] Dec 03 08:55:52 crc kubenswrapper[4576]: I1203 08:55:52.798376 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v2fw7\" (UniqueName: \"kubernetes.io/projected/c981f304-77aa-443d-8af7-3d665a32e754-kube-api-access-v2fw7\") pod \"marketplace-operator-79b997595-j26qz\" (UID: 
\"c981f304-77aa-443d-8af7-3d665a32e754\") " pod="openshift-marketplace/marketplace-operator-79b997595-j26qz" Dec 03 08:55:52 crc kubenswrapper[4576]: I1203 08:55:52.798422 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c981f304-77aa-443d-8af7-3d665a32e754-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-j26qz\" (UID: \"c981f304-77aa-443d-8af7-3d665a32e754\") " pod="openshift-marketplace/marketplace-operator-79b997595-j26qz" Dec 03 08:55:52 crc kubenswrapper[4576]: I1203 08:55:52.798735 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/c981f304-77aa-443d-8af7-3d665a32e754-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-j26qz\" (UID: \"c981f304-77aa-443d-8af7-3d665a32e754\") " pod="openshift-marketplace/marketplace-operator-79b997595-j26qz" Dec 03 08:55:52 crc kubenswrapper[4576]: I1203 08:55:52.799659 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c981f304-77aa-443d-8af7-3d665a32e754-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-j26qz\" (UID: \"c981f304-77aa-443d-8af7-3d665a32e754\") " pod="openshift-marketplace/marketplace-operator-79b997595-j26qz" Dec 03 08:55:52 crc kubenswrapper[4576]: E1203 08:55:52.801388 4576 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="3adfccc39aa89d8ebb4af533fd81cb6639ad70c9132e0d2afb8a0153b37df0ea" cmd=["grpc_health_probe","-addr=:50051"] Dec 03 08:55:52 crc kubenswrapper[4576]: I1203 08:55:52.808259 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/c981f304-77aa-443d-8af7-3d665a32e754-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-j26qz\" (UID: \"c981f304-77aa-443d-8af7-3d665a32e754\") " pod="openshift-marketplace/marketplace-operator-79b997595-j26qz" Dec 03 08:55:52 crc kubenswrapper[4576]: I1203 08:55:52.814786 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v2fw7\" (UniqueName: \"kubernetes.io/projected/c981f304-77aa-443d-8af7-3d665a32e754-kube-api-access-v2fw7\") pod \"marketplace-operator-79b997595-j26qz\" (UID: \"c981f304-77aa-443d-8af7-3d665a32e754\") " pod="openshift-marketplace/marketplace-operator-79b997595-j26qz" Dec 03 08:55:52 crc kubenswrapper[4576]: E1203 08:55:52.817314 4576 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="3adfccc39aa89d8ebb4af533fd81cb6639ad70c9132e0d2afb8a0153b37df0ea" cmd=["grpc_health_probe","-addr=:50051"] Dec 03 08:55:52 crc kubenswrapper[4576]: E1203 08:55:52.820077 4576 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="3adfccc39aa89d8ebb4af533fd81cb6639ad70c9132e0d2afb8a0153b37df0ea" cmd=["grpc_health_probe","-addr=:50051"] Dec 03 08:55:52 crc kubenswrapper[4576]: E1203 08:55:52.820176 4576 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command 
error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openshift-marketplace/community-operators-dtm22" podUID="76c222ad-bbdf-40e2-ba6a-d30820ca0a74" containerName="registry-server" Dec 03 08:55:52 crc kubenswrapper[4576]: I1203 08:55:52.971362 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/certified-operators-pxhvn" podUID="140e6a9b-4403-44d7-a0f3-39b6a96c7cd7" containerName="registry-server" probeResult="failure" output="" Dec 03 08:55:52 crc kubenswrapper[4576]: I1203 08:55:52.973286 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-marketplace/certified-operators-pxhvn" podUID="140e6a9b-4403-44d7-a0f3-39b6a96c7cd7" containerName="registry-server" probeResult="failure" output="" Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.007432 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-j26qz" Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.219321 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-j26qz"] Dec 03 08:55:53 crc kubenswrapper[4576]: W1203 08:55:53.243506 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc981f304_77aa_443d_8af7_3d665a32e754.slice/crio-9f7a569521cbc48ee89f35925d476c82773f49481d40ec9ff803296b90a3a55c WatchSource:0}: Error finding container 9f7a569521cbc48ee89f35925d476c82773f49481d40ec9ff803296b90a3a55c: Status 404 returned error can't find the container with id 9f7a569521cbc48ee89f35925d476c82773f49481d40ec9ff803296b90a3a55c Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.394793 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-l8mqv" Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.414833 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79b48d22-a968-422f-8bbb-5e8d82f63138-utilities\") pod \"79b48d22-a968-422f-8bbb-5e8d82f63138\" (UID: \"79b48d22-a968-422f-8bbb-5e8d82f63138\") " Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.414927 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/79b48d22-a968-422f-8bbb-5e8d82f63138-catalog-content\") pod \"79b48d22-a968-422f-8bbb-5e8d82f63138\" (UID: \"79b48d22-a968-422f-8bbb-5e8d82f63138\") " Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.414951 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4qwht\" (UniqueName: \"kubernetes.io/projected/79b48d22-a968-422f-8bbb-5e8d82f63138-kube-api-access-4qwht\") pod \"79b48d22-a968-422f-8bbb-5e8d82f63138\" (UID: \"79b48d22-a968-422f-8bbb-5e8d82f63138\") " Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.419586 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/79b48d22-a968-422f-8bbb-5e8d82f63138-utilities" (OuterVolumeSpecName: "utilities") pod "79b48d22-a968-422f-8bbb-5e8d82f63138" (UID: "79b48d22-a968-422f-8bbb-5e8d82f63138"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.420565 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79b48d22-a968-422f-8bbb-5e8d82f63138-kube-api-access-4qwht" (OuterVolumeSpecName: "kube-api-access-4qwht") pod "79b48d22-a968-422f-8bbb-5e8d82f63138" (UID: "79b48d22-a968-422f-8bbb-5e8d82f63138"). InnerVolumeSpecName "kube-api-access-4qwht". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.488621 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-jf9sv" Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.516965 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lfsxt\" (UniqueName: \"kubernetes.io/projected/1d84683d-c810-4ef4-bd1c-6b17ed4c135d-kube-api-access-lfsxt\") pod \"1d84683d-c810-4ef4-bd1c-6b17ed4c135d\" (UID: \"1d84683d-c810-4ef4-bd1c-6b17ed4c135d\") " Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.517170 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/1d84683d-c810-4ef4-bd1c-6b17ed4c135d-marketplace-operator-metrics\") pod \"1d84683d-c810-4ef4-bd1c-6b17ed4c135d\" (UID: \"1d84683d-c810-4ef4-bd1c-6b17ed4c135d\") " Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.517255 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1d84683d-c810-4ef4-bd1c-6b17ed4c135d-marketplace-trusted-ca\") pod \"1d84683d-c810-4ef4-bd1c-6b17ed4c135d\" (UID: \"1d84683d-c810-4ef4-bd1c-6b17ed4c135d\") " Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.517529 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4qwht\" (UniqueName: \"kubernetes.io/projected/79b48d22-a968-422f-8bbb-5e8d82f63138-kube-api-access-4qwht\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.517563 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79b48d22-a968-422f-8bbb-5e8d82f63138-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.518049 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1d84683d-c810-4ef4-bd1c-6b17ed4c135d-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "1d84683d-c810-4ef4-bd1c-6b17ed4c135d" (UID: "1d84683d-c810-4ef4-bd1c-6b17ed4c135d"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.521825 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d84683d-c810-4ef4-bd1c-6b17ed4c135d-kube-api-access-lfsxt" (OuterVolumeSpecName: "kube-api-access-lfsxt") pod "1d84683d-c810-4ef4-bd1c-6b17ed4c135d" (UID: "1d84683d-c810-4ef4-bd1c-6b17ed4c135d"). InnerVolumeSpecName "kube-api-access-lfsxt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.521933 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d84683d-c810-4ef4-bd1c-6b17ed4c135d-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "1d84683d-c810-4ef4-bd1c-6b17ed4c135d" (UID: "1d84683d-c810-4ef4-bd1c-6b17ed4c135d"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.541217 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/79b48d22-a968-422f-8bbb-5e8d82f63138-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "79b48d22-a968-422f-8bbb-5e8d82f63138" (UID: "79b48d22-a968-422f-8bbb-5e8d82f63138"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.618836 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/79b48d22-a968-422f-8bbb-5e8d82f63138-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.618871 4576 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/1d84683d-c810-4ef4-bd1c-6b17ed4c135d-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.618882 4576 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1d84683d-c810-4ef4-bd1c-6b17ed4c135d-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.618894 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lfsxt\" (UniqueName: \"kubernetes.io/projected/1d84683d-c810-4ef4-bd1c-6b17ed4c135d-kube-api-access-lfsxt\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.793364 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nwkqd" Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.870106 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6pv5k" Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.912854 4576 generic.go:334] "Generic (PLEG): container finished" podID="774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7" containerID="f6635674ba75a2e2f5dbb475f9cf7902e8b1ab78f58eeae704e0397bcea53dfb" exitCode=0 Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.912932 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6pv5k" event={"ID":"774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7","Type":"ContainerDied","Data":"f6635674ba75a2e2f5dbb475f9cf7902e8b1ab78f58eeae704e0397bcea53dfb"} Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.912962 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6pv5k" event={"ID":"774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7","Type":"ContainerDied","Data":"a16d1761b287055e978450142148ed4df4fca9cf8cca5aefc83258a97efdfbd7"} Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.913006 4576 scope.go:117] "RemoveContainer" containerID="f6635674ba75a2e2f5dbb475f9cf7902e8b1ab78f58eeae704e0397bcea53dfb" Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.913174 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6pv5k" Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.931938 4576 generic.go:334] "Generic (PLEG): container finished" podID="76c222ad-bbdf-40e2-ba6a-d30820ca0a74" containerID="3adfccc39aa89d8ebb4af533fd81cb6639ad70c9132e0d2afb8a0153b37df0ea" exitCode=0 Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.932023 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dtm22" event={"ID":"76c222ad-bbdf-40e2-ba6a-d30820ca0a74","Type":"ContainerDied","Data":"3adfccc39aa89d8ebb4af533fd81cb6639ad70c9132e0d2afb8a0153b37df0ea"} Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.936031 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-j26qz" event={"ID":"c981f304-77aa-443d-8af7-3d665a32e754","Type":"ContainerStarted","Data":"338ed128bd396ffc09162c3fbb725dd85f1b8e2fc501ceb3c69c9066a1f07d13"} Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.936069 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-j26qz" event={"ID":"c981f304-77aa-443d-8af7-3d665a32e754","Type":"ContainerStarted","Data":"9f7a569521cbc48ee89f35925d476c82773f49481d40ec9ff803296b90a3a55c"} Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.937058 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-j26qz" Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.939248 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7-utilities\") pod \"774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7\" (UID: \"774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7\") " Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.939281 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2tdpc\" (UniqueName: \"kubernetes.io/projected/c78abb51-1399-44a2-8de8-16b060a40d50-kube-api-access-2tdpc\") pod \"c78abb51-1399-44a2-8de8-16b060a40d50\" (UID: \"c78abb51-1399-44a2-8de8-16b060a40d50\") " Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 
08:55:53.939301 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c78abb51-1399-44a2-8de8-16b060a40d50-utilities\") pod \"c78abb51-1399-44a2-8de8-16b060a40d50\" (UID: \"c78abb51-1399-44a2-8de8-16b060a40d50\") " Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.939334 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7-catalog-content\") pod \"774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7\" (UID: \"774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7\") " Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.939354 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c78abb51-1399-44a2-8de8-16b060a40d50-catalog-content\") pod \"c78abb51-1399-44a2-8de8-16b060a40d50\" (UID: \"c78abb51-1399-44a2-8de8-16b060a40d50\") " Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.939405 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jr7cl\" (UniqueName: \"kubernetes.io/projected/774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7-kube-api-access-jr7cl\") pod \"774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7\" (UID: \"774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7\") " Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.939735 4576 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-j26qz container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.9:8080/healthz\": dial tcp 10.217.0.9:8080: connect: connection refused" start-of-body= Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.939818 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-j26qz" podUID="c981f304-77aa-443d-8af7-3d665a32e754" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.9:8080/healthz\": dial tcp 10.217.0.9:8080: connect: connection refused" Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.943805 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7-utilities" (OuterVolumeSpecName: "utilities") pod "774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7" (UID: "774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.945452 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c78abb51-1399-44a2-8de8-16b060a40d50-utilities" (OuterVolumeSpecName: "utilities") pod "c78abb51-1399-44a2-8de8-16b060a40d50" (UID: "c78abb51-1399-44a2-8de8-16b060a40d50"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.945679 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c78abb51-1399-44a2-8de8-16b060a40d50-kube-api-access-2tdpc" (OuterVolumeSpecName: "kube-api-access-2tdpc") pod "c78abb51-1399-44a2-8de8-16b060a40d50" (UID: "c78abb51-1399-44a2-8de8-16b060a40d50"). InnerVolumeSpecName "kube-api-access-2tdpc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.948143 4576 generic.go:334] "Generic (PLEG): container finished" podID="79b48d22-a968-422f-8bbb-5e8d82f63138" containerID="182d764e038514cb8157837d35514b7ef1512ab4e6c49fbfca813950de151db5" exitCode=0 Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.948338 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-l8mqv" Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.949263 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l8mqv" event={"ID":"79b48d22-a968-422f-8bbb-5e8d82f63138","Type":"ContainerDied","Data":"182d764e038514cb8157837d35514b7ef1512ab4e6c49fbfca813950de151db5"} Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.949325 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l8mqv" event={"ID":"79b48d22-a968-422f-8bbb-5e8d82f63138","Type":"ContainerDied","Data":"47293fc70505b1944ba7be2c4675db37cde20877368acf6acddf35ea981823df"} Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.951437 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7-kube-api-access-jr7cl" (OuterVolumeSpecName: "kube-api-access-jr7cl") pod "774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7" (UID: "774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7"). InnerVolumeSpecName "kube-api-access-jr7cl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.952503 4576 scope.go:117] "RemoveContainer" containerID="b0aed14ee6031bc7ae8b2c18251f5330aa9e9aff071ff02e65ee784388a29171" Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.956053 4576 generic.go:334] "Generic (PLEG): container finished" podID="1d84683d-c810-4ef4-bd1c-6b17ed4c135d" containerID="e591c1e4fce29e6f5433bc3f64ee9d95dea400671cf523cea7c19034dac59dff" exitCode=0 Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.956368 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-jf9sv" Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.956969 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-jf9sv" event={"ID":"1d84683d-c810-4ef4-bd1c-6b17ed4c135d","Type":"ContainerDied","Data":"e591c1e4fce29e6f5433bc3f64ee9d95dea400671cf523cea7c19034dac59dff"} Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.957057 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-jf9sv" event={"ID":"1d84683d-c810-4ef4-bd1c-6b17ed4c135d","Type":"ContainerDied","Data":"e0a08dc04ef42d40f28e95fcb0a4ee404cdf83f64bec281623f031e678fe95e6"} Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.970532 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c78abb51-1399-44a2-8de8-16b060a40d50-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c78abb51-1399-44a2-8de8-16b060a40d50" (UID: "c78abb51-1399-44a2-8de8-16b060a40d50"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.973482 4576 generic.go:334] "Generic (PLEG): container finished" podID="c78abb51-1399-44a2-8de8-16b060a40d50" containerID="c22378675cd8b7c658cd47a87fda63ca96313f86ed62dc8d3df2d199b006997b" exitCode=0 Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.973509 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nwkqd" event={"ID":"c78abb51-1399-44a2-8de8-16b060a40d50","Type":"ContainerDied","Data":"c22378675cd8b7c658cd47a87fda63ca96313f86ed62dc8d3df2d199b006997b"} Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.973561 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nwkqd" event={"ID":"c78abb51-1399-44a2-8de8-16b060a40d50","Type":"ContainerDied","Data":"a8f3592dfbfdccefbb3e4090aa5a2db8df75f127c3a86d0a0b817b6a9e2fb0f4"} Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.973497 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nwkqd" Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.976832 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-j26qz" podStartSLOduration=1.9768122049999999 podStartE2EDuration="1.976812205s" podCreationTimestamp="2025-12-03 08:55:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 08:55:53.961967876 +0000 UTC m=+961.347944870" watchObservedRunningTime="2025-12-03 08:55:53.976812205 +0000 UTC m=+961.362789189" Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.991157 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-pxhvn" Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.991703 4576 generic.go:334] "Generic (PLEG): container finished" podID="140e6a9b-4403-44d7-a0f3-39b6a96c7cd7" containerID="ea7900eb5c499a7b7e9a8cb20c21167c1feddfbb14f2e471d7a14d2e6ce13541" exitCode=0 Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.991746 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pxhvn" event={"ID":"140e6a9b-4403-44d7-a0f3-39b6a96c7cd7","Type":"ContainerDied","Data":"ea7900eb5c499a7b7e9a8cb20c21167c1feddfbb14f2e471d7a14d2e6ce13541"} Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.993375 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-l8mqv"] Dec 03 08:55:53 crc kubenswrapper[4576]: I1203 08:55:53.996961 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-l8mqv"] Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.010250 4576 scope.go:117] "RemoveContainer" containerID="435d5834c25e1ddb36cece176a4b48ee166e9019c9b520553e193f7a98661459" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.015438 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-jf9sv"] Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.021019 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-jf9sv"] Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.040333 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.040610 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2tdpc\" (UniqueName: \"kubernetes.io/projected/c78abb51-1399-44a2-8de8-16b060a40d50-kube-api-access-2tdpc\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.040703 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c78abb51-1399-44a2-8de8-16b060a40d50-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.040771 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c78abb51-1399-44a2-8de8-16b060a40d50-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.040835 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jr7cl\" (UniqueName: \"kubernetes.io/projected/774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7-kube-api-access-jr7cl\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.064034 4576 scope.go:117] "RemoveContainer" containerID="f6635674ba75a2e2f5dbb475f9cf7902e8b1ab78f58eeae704e0397bcea53dfb" Dec 03 08:55:54 crc kubenswrapper[4576]: E1203 08:55:54.066504 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f6635674ba75a2e2f5dbb475f9cf7902e8b1ab78f58eeae704e0397bcea53dfb\": container with ID starting with f6635674ba75a2e2f5dbb475f9cf7902e8b1ab78f58eeae704e0397bcea53dfb not found: ID does not exist" containerID="f6635674ba75a2e2f5dbb475f9cf7902e8b1ab78f58eeae704e0397bcea53dfb" Dec 03 08:55:54 
crc kubenswrapper[4576]: I1203 08:55:54.066578 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6635674ba75a2e2f5dbb475f9cf7902e8b1ab78f58eeae704e0397bcea53dfb"} err="failed to get container status \"f6635674ba75a2e2f5dbb475f9cf7902e8b1ab78f58eeae704e0397bcea53dfb\": rpc error: code = NotFound desc = could not find container \"f6635674ba75a2e2f5dbb475f9cf7902e8b1ab78f58eeae704e0397bcea53dfb\": container with ID starting with f6635674ba75a2e2f5dbb475f9cf7902e8b1ab78f58eeae704e0397bcea53dfb not found: ID does not exist" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.066611 4576 scope.go:117] "RemoveContainer" containerID="b0aed14ee6031bc7ae8b2c18251f5330aa9e9aff071ff02e65ee784388a29171" Dec 03 08:55:54 crc kubenswrapper[4576]: E1203 08:55:54.068281 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b0aed14ee6031bc7ae8b2c18251f5330aa9e9aff071ff02e65ee784388a29171\": container with ID starting with b0aed14ee6031bc7ae8b2c18251f5330aa9e9aff071ff02e65ee784388a29171 not found: ID does not exist" containerID="b0aed14ee6031bc7ae8b2c18251f5330aa9e9aff071ff02e65ee784388a29171" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.069057 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0aed14ee6031bc7ae8b2c18251f5330aa9e9aff071ff02e65ee784388a29171"} err="failed to get container status \"b0aed14ee6031bc7ae8b2c18251f5330aa9e9aff071ff02e65ee784388a29171\": rpc error: code = NotFound desc = could not find container \"b0aed14ee6031bc7ae8b2c18251f5330aa9e9aff071ff02e65ee784388a29171\": container with ID starting with b0aed14ee6031bc7ae8b2c18251f5330aa9e9aff071ff02e65ee784388a29171 not found: ID does not exist" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.069220 4576 scope.go:117] "RemoveContainer" containerID="435d5834c25e1ddb36cece176a4b48ee166e9019c9b520553e193f7a98661459" Dec 03 08:55:54 crc kubenswrapper[4576]: E1203 08:55:54.069702 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"435d5834c25e1ddb36cece176a4b48ee166e9019c9b520553e193f7a98661459\": container with ID starting with 435d5834c25e1ddb36cece176a4b48ee166e9019c9b520553e193f7a98661459 not found: ID does not exist" containerID="435d5834c25e1ddb36cece176a4b48ee166e9019c9b520553e193f7a98661459" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.069853 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"435d5834c25e1ddb36cece176a4b48ee166e9019c9b520553e193f7a98661459"} err="failed to get container status \"435d5834c25e1ddb36cece176a4b48ee166e9019c9b520553e193f7a98661459\": rpc error: code = NotFound desc = could not find container \"435d5834c25e1ddb36cece176a4b48ee166e9019c9b520553e193f7a98661459\": container with ID starting with 435d5834c25e1ddb36cece176a4b48ee166e9019c9b520553e193f7a98661459 not found: ID does not exist" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.069961 4576 scope.go:117] "RemoveContainer" containerID="182d764e038514cb8157837d35514b7ef1512ab4e6c49fbfca813950de151db5" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.072880 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dtm22" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.097496 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nwkqd"] Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.099604 4576 scope.go:117] "RemoveContainer" containerID="aa2d1ade663ded7b46f048c4a189f74952038db0b2393a5392003c9adafd1635" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.100950 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7" (UID: "774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.103506 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-nwkqd"] Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.139397 4576 scope.go:117] "RemoveContainer" containerID="085c48ec78d9c6874b8aae2793124a9b1603588e4e6345f77697409eb16fa5c1" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.141273 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/76c222ad-bbdf-40e2-ba6a-d30820ca0a74-catalog-content\") pod \"76c222ad-bbdf-40e2-ba6a-d30820ca0a74\" (UID: \"76c222ad-bbdf-40e2-ba6a-d30820ca0a74\") " Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.143647 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-74gf8\" (UniqueName: \"kubernetes.io/projected/140e6a9b-4403-44d7-a0f3-39b6a96c7cd7-kube-api-access-74gf8\") pod \"140e6a9b-4403-44d7-a0f3-39b6a96c7cd7\" (UID: \"140e6a9b-4403-44d7-a0f3-39b6a96c7cd7\") " Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.143757 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/140e6a9b-4403-44d7-a0f3-39b6a96c7cd7-utilities\") pod \"140e6a9b-4403-44d7-a0f3-39b6a96c7cd7\" (UID: \"140e6a9b-4403-44d7-a0f3-39b6a96c7cd7\") " Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.143913 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-prdzb\" (UniqueName: \"kubernetes.io/projected/76c222ad-bbdf-40e2-ba6a-d30820ca0a74-kube-api-access-prdzb\") pod \"76c222ad-bbdf-40e2-ba6a-d30820ca0a74\" (UID: \"76c222ad-bbdf-40e2-ba6a-d30820ca0a74\") " Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.144016 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/140e6a9b-4403-44d7-a0f3-39b6a96c7cd7-catalog-content\") pod \"140e6a9b-4403-44d7-a0f3-39b6a96c7cd7\" (UID: \"140e6a9b-4403-44d7-a0f3-39b6a96c7cd7\") " Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.144079 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/76c222ad-bbdf-40e2-ba6a-d30820ca0a74-utilities\") pod \"76c222ad-bbdf-40e2-ba6a-d30820ca0a74\" (UID: \"76c222ad-bbdf-40e2-ba6a-d30820ca0a74\") " Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.144356 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.145056 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/76c222ad-bbdf-40e2-ba6a-d30820ca0a74-utilities" (OuterVolumeSpecName: "utilities") pod "76c222ad-bbdf-40e2-ba6a-d30820ca0a74" (UID: "76c222ad-bbdf-40e2-ba6a-d30820ca0a74"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.146668 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/140e6a9b-4403-44d7-a0f3-39b6a96c7cd7-utilities" (OuterVolumeSpecName: "utilities") pod "140e6a9b-4403-44d7-a0f3-39b6a96c7cd7" (UID: "140e6a9b-4403-44d7-a0f3-39b6a96c7cd7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.149722 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76c222ad-bbdf-40e2-ba6a-d30820ca0a74-kube-api-access-prdzb" (OuterVolumeSpecName: "kube-api-access-prdzb") pod "76c222ad-bbdf-40e2-ba6a-d30820ca0a74" (UID: "76c222ad-bbdf-40e2-ba6a-d30820ca0a74"). InnerVolumeSpecName "kube-api-access-prdzb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.159908 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/140e6a9b-4403-44d7-a0f3-39b6a96c7cd7-kube-api-access-74gf8" (OuterVolumeSpecName: "kube-api-access-74gf8") pod "140e6a9b-4403-44d7-a0f3-39b6a96c7cd7" (UID: "140e6a9b-4403-44d7-a0f3-39b6a96c7cd7"). InnerVolumeSpecName "kube-api-access-74gf8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.187473 4576 scope.go:117] "RemoveContainer" containerID="182d764e038514cb8157837d35514b7ef1512ab4e6c49fbfca813950de151db5" Dec 03 08:55:54 crc kubenswrapper[4576]: E1203 08:55:54.188164 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"182d764e038514cb8157837d35514b7ef1512ab4e6c49fbfca813950de151db5\": container with ID starting with 182d764e038514cb8157837d35514b7ef1512ab4e6c49fbfca813950de151db5 not found: ID does not exist" containerID="182d764e038514cb8157837d35514b7ef1512ab4e6c49fbfca813950de151db5" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.188200 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"182d764e038514cb8157837d35514b7ef1512ab4e6c49fbfca813950de151db5"} err="failed to get container status \"182d764e038514cb8157837d35514b7ef1512ab4e6c49fbfca813950de151db5\": rpc error: code = NotFound desc = could not find container \"182d764e038514cb8157837d35514b7ef1512ab4e6c49fbfca813950de151db5\": container with ID starting with 182d764e038514cb8157837d35514b7ef1512ab4e6c49fbfca813950de151db5 not found: ID does not exist" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.188265 4576 scope.go:117] "RemoveContainer" containerID="aa2d1ade663ded7b46f048c4a189f74952038db0b2393a5392003c9adafd1635" Dec 03 08:55:54 crc kubenswrapper[4576]: E1203 08:55:54.188714 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa2d1ade663ded7b46f048c4a189f74952038db0b2393a5392003c9adafd1635\": container with ID starting with aa2d1ade663ded7b46f048c4a189f74952038db0b2393a5392003c9adafd1635 not found: ID does not exist" containerID="aa2d1ade663ded7b46f048c4a189f74952038db0b2393a5392003c9adafd1635" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.188747 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa2d1ade663ded7b46f048c4a189f74952038db0b2393a5392003c9adafd1635"} err="failed to get container status \"aa2d1ade663ded7b46f048c4a189f74952038db0b2393a5392003c9adafd1635\": rpc error: code = NotFound desc = could not find container \"aa2d1ade663ded7b46f048c4a189f74952038db0b2393a5392003c9adafd1635\": container with ID starting with aa2d1ade663ded7b46f048c4a189f74952038db0b2393a5392003c9adafd1635 not found: ID does not exist" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.188768 4576 scope.go:117] "RemoveContainer" containerID="085c48ec78d9c6874b8aae2793124a9b1603588e4e6345f77697409eb16fa5c1" Dec 03 08:55:54 crc kubenswrapper[4576]: E1203 08:55:54.189839 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"085c48ec78d9c6874b8aae2793124a9b1603588e4e6345f77697409eb16fa5c1\": container with ID starting with 085c48ec78d9c6874b8aae2793124a9b1603588e4e6345f77697409eb16fa5c1 not found: ID does not exist" containerID="085c48ec78d9c6874b8aae2793124a9b1603588e4e6345f77697409eb16fa5c1" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.189878 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"085c48ec78d9c6874b8aae2793124a9b1603588e4e6345f77697409eb16fa5c1"} err="failed to get container status \"085c48ec78d9c6874b8aae2793124a9b1603588e4e6345f77697409eb16fa5c1\": rpc error: code = NotFound desc = could not 
find container \"085c48ec78d9c6874b8aae2793124a9b1603588e4e6345f77697409eb16fa5c1\": container with ID starting with 085c48ec78d9c6874b8aae2793124a9b1603588e4e6345f77697409eb16fa5c1 not found: ID does not exist" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.189925 4576 scope.go:117] "RemoveContainer" containerID="e591c1e4fce29e6f5433bc3f64ee9d95dea400671cf523cea7c19034dac59dff" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.211020 4576 scope.go:117] "RemoveContainer" containerID="e591c1e4fce29e6f5433bc3f64ee9d95dea400671cf523cea7c19034dac59dff" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.212698 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/76c222ad-bbdf-40e2-ba6a-d30820ca0a74-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "76c222ad-bbdf-40e2-ba6a-d30820ca0a74" (UID: "76c222ad-bbdf-40e2-ba6a-d30820ca0a74"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:55:54 crc kubenswrapper[4576]: E1203 08:55:54.215187 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e591c1e4fce29e6f5433bc3f64ee9d95dea400671cf523cea7c19034dac59dff\": container with ID starting with e591c1e4fce29e6f5433bc3f64ee9d95dea400671cf523cea7c19034dac59dff not found: ID does not exist" containerID="e591c1e4fce29e6f5433bc3f64ee9d95dea400671cf523cea7c19034dac59dff" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.215227 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e591c1e4fce29e6f5433bc3f64ee9d95dea400671cf523cea7c19034dac59dff"} err="failed to get container status \"e591c1e4fce29e6f5433bc3f64ee9d95dea400671cf523cea7c19034dac59dff\": rpc error: code = NotFound desc = could not find container \"e591c1e4fce29e6f5433bc3f64ee9d95dea400671cf523cea7c19034dac59dff\": container with ID starting with e591c1e4fce29e6f5433bc3f64ee9d95dea400671cf523cea7c19034dac59dff not found: ID does not exist" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.215253 4576 scope.go:117] "RemoveContainer" containerID="c22378675cd8b7c658cd47a87fda63ca96313f86ed62dc8d3df2d199b006997b" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.218444 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/140e6a9b-4403-44d7-a0f3-39b6a96c7cd7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "140e6a9b-4403-44d7-a0f3-39b6a96c7cd7" (UID: "140e6a9b-4403-44d7-a0f3-39b6a96c7cd7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.236826 4576 scope.go:117] "RemoveContainer" containerID="daf931c403e3b5ebd9bf07d4170930f8e4bc89abc2f76fb8cbc99f2ecf43b3a5" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.245326 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-prdzb\" (UniqueName: \"kubernetes.io/projected/76c222ad-bbdf-40e2-ba6a-d30820ca0a74-kube-api-access-prdzb\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.245370 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/140e6a9b-4403-44d7-a0f3-39b6a96c7cd7-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.245396 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/76c222ad-bbdf-40e2-ba6a-d30820ca0a74-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.245409 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/76c222ad-bbdf-40e2-ba6a-d30820ca0a74-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.245422 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-74gf8\" (UniqueName: \"kubernetes.io/projected/140e6a9b-4403-44d7-a0f3-39b6a96c7cd7-kube-api-access-74gf8\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.245432 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/140e6a9b-4403-44d7-a0f3-39b6a96c7cd7-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.255732 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6pv5k"] Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.258657 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-6pv5k"] Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.259269 4576 scope.go:117] "RemoveContainer" containerID="bb91a0c626ba53e0fa3e91dc15c33ce5dd054e053d393766d97cf5565bebbd7f" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.273517 4576 scope.go:117] "RemoveContainer" containerID="c22378675cd8b7c658cd47a87fda63ca96313f86ed62dc8d3df2d199b006997b" Dec 03 08:55:54 crc kubenswrapper[4576]: E1203 08:55:54.274495 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c22378675cd8b7c658cd47a87fda63ca96313f86ed62dc8d3df2d199b006997b\": container with ID starting with c22378675cd8b7c658cd47a87fda63ca96313f86ed62dc8d3df2d199b006997b not found: ID does not exist" containerID="c22378675cd8b7c658cd47a87fda63ca96313f86ed62dc8d3df2d199b006997b" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.274584 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c22378675cd8b7c658cd47a87fda63ca96313f86ed62dc8d3df2d199b006997b"} err="failed to get container status \"c22378675cd8b7c658cd47a87fda63ca96313f86ed62dc8d3df2d199b006997b\": rpc error: code = NotFound desc = could not find container \"c22378675cd8b7c658cd47a87fda63ca96313f86ed62dc8d3df2d199b006997b\": container with ID starting with 
c22378675cd8b7c658cd47a87fda63ca96313f86ed62dc8d3df2d199b006997b not found: ID does not exist" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.274647 4576 scope.go:117] "RemoveContainer" containerID="daf931c403e3b5ebd9bf07d4170930f8e4bc89abc2f76fb8cbc99f2ecf43b3a5" Dec 03 08:55:54 crc kubenswrapper[4576]: E1203 08:55:54.276296 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"daf931c403e3b5ebd9bf07d4170930f8e4bc89abc2f76fb8cbc99f2ecf43b3a5\": container with ID starting with daf931c403e3b5ebd9bf07d4170930f8e4bc89abc2f76fb8cbc99f2ecf43b3a5 not found: ID does not exist" containerID="daf931c403e3b5ebd9bf07d4170930f8e4bc89abc2f76fb8cbc99f2ecf43b3a5" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.276346 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"daf931c403e3b5ebd9bf07d4170930f8e4bc89abc2f76fb8cbc99f2ecf43b3a5"} err="failed to get container status \"daf931c403e3b5ebd9bf07d4170930f8e4bc89abc2f76fb8cbc99f2ecf43b3a5\": rpc error: code = NotFound desc = could not find container \"daf931c403e3b5ebd9bf07d4170930f8e4bc89abc2f76fb8cbc99f2ecf43b3a5\": container with ID starting with daf931c403e3b5ebd9bf07d4170930f8e4bc89abc2f76fb8cbc99f2ecf43b3a5 not found: ID does not exist" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.276374 4576 scope.go:117] "RemoveContainer" containerID="bb91a0c626ba53e0fa3e91dc15c33ce5dd054e053d393766d97cf5565bebbd7f" Dec 03 08:55:54 crc kubenswrapper[4576]: E1203 08:55:54.276667 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb91a0c626ba53e0fa3e91dc15c33ce5dd054e053d393766d97cf5565bebbd7f\": container with ID starting with bb91a0c626ba53e0fa3e91dc15c33ce5dd054e053d393766d97cf5565bebbd7f not found: ID does not exist" containerID="bb91a0c626ba53e0fa3e91dc15c33ce5dd054e053d393766d97cf5565bebbd7f" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.276698 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb91a0c626ba53e0fa3e91dc15c33ce5dd054e053d393766d97cf5565bebbd7f"} err="failed to get container status \"bb91a0c626ba53e0fa3e91dc15c33ce5dd054e053d393766d97cf5565bebbd7f\": rpc error: code = NotFound desc = could not find container \"bb91a0c626ba53e0fa3e91dc15c33ce5dd054e053d393766d97cf5565bebbd7f\": container with ID starting with bb91a0c626ba53e0fa3e91dc15c33ce5dd054e053d393766d97cf5565bebbd7f not found: ID does not exist" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.276716 4576 scope.go:117] "RemoveContainer" containerID="ea7900eb5c499a7b7e9a8cb20c21167c1feddfbb14f2e471d7a14d2e6ce13541" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.293607 4576 scope.go:117] "RemoveContainer" containerID="22d441599cf836f8ccb30eb05d3e0aaf5536209748c1b84903b6ad3cb823551b" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.298428 4576 scope.go:117] "RemoveContainer" containerID="a455414c142f60f11dc5bc5d141748620e3d62d96b2ba2d21424bbeddd072002" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.314249 4576 scope.go:117] "RemoveContainer" containerID="22d441599cf836f8ccb30eb05d3e0aaf5536209748c1b84903b6ad3cb823551b" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.320316 4576 scope.go:117] "RemoveContainer" containerID="725c82ae70a5f390f5f8934925fd9243748e0ecdbe6dbde1148e72a121528b18" Dec 03 08:55:54 crc kubenswrapper[4576]: E1203 08:55:54.323097 4576 log.go:32] 
"RemoveContainer from runtime service failed" err="rpc error: code = Unknown desc = failed to delete container k8s_extract-content_certified-operators-pxhvn_openshift-marketplace_140e6a9b-4403-44d7-a0f3-39b6a96c7cd7_0 in pod sandbox 2296d703cbcac54bc6a5efe2b3613969dce6cc0eb138527da7ec06527bf8ae1d from index: no such id: '22d441599cf836f8ccb30eb05d3e0aaf5536209748c1b84903b6ad3cb823551b'" containerID="22d441599cf836f8ccb30eb05d3e0aaf5536209748c1b84903b6ad3cb823551b" Dec 03 08:55:54 crc kubenswrapper[4576]: E1203 08:55:54.323134 4576 kuberuntime_gc.go:150] "Failed to remove container" err="rpc error: code = Unknown desc = failed to delete container k8s_extract-content_certified-operators-pxhvn_openshift-marketplace_140e6a9b-4403-44d7-a0f3-39b6a96c7cd7_0 in pod sandbox 2296d703cbcac54bc6a5efe2b3613969dce6cc0eb138527da7ec06527bf8ae1d from index: no such id: '22d441599cf836f8ccb30eb05d3e0aaf5536209748c1b84903b6ad3cb823551b'" containerID="22d441599cf836f8ccb30eb05d3e0aaf5536209748c1b84903b6ad3cb823551b" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.323170 4576 scope.go:117] "RemoveContainer" containerID="3adfccc39aa89d8ebb4af533fd81cb6639ad70c9132e0d2afb8a0153b37df0ea" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.342287 4576 scope.go:117] "RemoveContainer" containerID="725c82ae70a5f390f5f8934925fd9243748e0ecdbe6dbde1148e72a121528b18" Dec 03 08:55:54 crc kubenswrapper[4576]: E1203 08:55:54.342795 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"725c82ae70a5f390f5f8934925fd9243748e0ecdbe6dbde1148e72a121528b18\": container with ID starting with 725c82ae70a5f390f5f8934925fd9243748e0ecdbe6dbde1148e72a121528b18 not found: ID does not exist" containerID="725c82ae70a5f390f5f8934925fd9243748e0ecdbe6dbde1148e72a121528b18" Dec 03 08:55:54 crc kubenswrapper[4576]: E1203 08:55:54.342839 4576 kuberuntime_gc.go:150] "Failed to remove container" err="failed to get container status \"725c82ae70a5f390f5f8934925fd9243748e0ecdbe6dbde1148e72a121528b18\": rpc error: code = NotFound desc = could not find container \"725c82ae70a5f390f5f8934925fd9243748e0ecdbe6dbde1148e72a121528b18\": container with ID starting with 725c82ae70a5f390f5f8934925fd9243748e0ecdbe6dbde1148e72a121528b18 not found: ID does not exist" containerID="725c82ae70a5f390f5f8934925fd9243748e0ecdbe6dbde1148e72a121528b18" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.342869 4576 scope.go:117] "RemoveContainer" containerID="fab57c53fe3cb29fcb345a22ba620afb1fb312bddf1df64fb0f948a2b4dbf262" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.741741 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-pv99n"] Dec 03 08:55:54 crc kubenswrapper[4576]: E1203 08:55:54.742013 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c78abb51-1399-44a2-8de8-16b060a40d50" containerName="extract-content" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.742033 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="c78abb51-1399-44a2-8de8-16b060a40d50" containerName="extract-content" Dec 03 08:55:54 crc kubenswrapper[4576]: E1203 08:55:54.742049 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79b48d22-a968-422f-8bbb-5e8d82f63138" containerName="extract-content" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.742057 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="79b48d22-a968-422f-8bbb-5e8d82f63138" containerName="extract-content" Dec 03 08:55:54 
crc kubenswrapper[4576]: E1203 08:55:54.742068 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="140e6a9b-4403-44d7-a0f3-39b6a96c7cd7" containerName="registry-server" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.742075 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="140e6a9b-4403-44d7-a0f3-39b6a96c7cd7" containerName="registry-server" Dec 03 08:55:54 crc kubenswrapper[4576]: E1203 08:55:54.742086 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7" containerName="extract-content" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.742094 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7" containerName="extract-content" Dec 03 08:55:54 crc kubenswrapper[4576]: E1203 08:55:54.742102 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c78abb51-1399-44a2-8de8-16b060a40d50" containerName="registry-server" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.742110 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="c78abb51-1399-44a2-8de8-16b060a40d50" containerName="registry-server" Dec 03 08:55:54 crc kubenswrapper[4576]: E1203 08:55:54.742120 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79b48d22-a968-422f-8bbb-5e8d82f63138" containerName="extract-utilities" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.742128 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="79b48d22-a968-422f-8bbb-5e8d82f63138" containerName="extract-utilities" Dec 03 08:55:54 crc kubenswrapper[4576]: E1203 08:55:54.742139 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="140e6a9b-4403-44d7-a0f3-39b6a96c7cd7" containerName="extract-content" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.742147 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="140e6a9b-4403-44d7-a0f3-39b6a96c7cd7" containerName="extract-content" Dec 03 08:55:54 crc kubenswrapper[4576]: E1203 08:55:54.742157 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d84683d-c810-4ef4-bd1c-6b17ed4c135d" containerName="marketplace-operator" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.742165 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d84683d-c810-4ef4-bd1c-6b17ed4c135d" containerName="marketplace-operator" Dec 03 08:55:54 crc kubenswrapper[4576]: E1203 08:55:54.742175 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76c222ad-bbdf-40e2-ba6a-d30820ca0a74" containerName="extract-utilities" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.742183 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="76c222ad-bbdf-40e2-ba6a-d30820ca0a74" containerName="extract-utilities" Dec 03 08:55:54 crc kubenswrapper[4576]: E1203 08:55:54.742195 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7" containerName="registry-server" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.742205 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7" containerName="registry-server" Dec 03 08:55:54 crc kubenswrapper[4576]: E1203 08:55:54.742216 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c78abb51-1399-44a2-8de8-16b060a40d50" containerName="extract-utilities" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.742224 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="c78abb51-1399-44a2-8de8-16b060a40d50" 
containerName="extract-utilities" Dec 03 08:55:54 crc kubenswrapper[4576]: E1203 08:55:54.742232 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7" containerName="extract-utilities" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.742240 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7" containerName="extract-utilities" Dec 03 08:55:54 crc kubenswrapper[4576]: E1203 08:55:54.742253 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="140e6a9b-4403-44d7-a0f3-39b6a96c7cd7" containerName="extract-utilities" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.742263 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="140e6a9b-4403-44d7-a0f3-39b6a96c7cd7" containerName="extract-utilities" Dec 03 08:55:54 crc kubenswrapper[4576]: E1203 08:55:54.742272 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76c222ad-bbdf-40e2-ba6a-d30820ca0a74" containerName="registry-server" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.742280 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="76c222ad-bbdf-40e2-ba6a-d30820ca0a74" containerName="registry-server" Dec 03 08:55:54 crc kubenswrapper[4576]: E1203 08:55:54.742295 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76c222ad-bbdf-40e2-ba6a-d30820ca0a74" containerName="extract-content" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.742302 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="76c222ad-bbdf-40e2-ba6a-d30820ca0a74" containerName="extract-content" Dec 03 08:55:54 crc kubenswrapper[4576]: E1203 08:55:54.742314 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79b48d22-a968-422f-8bbb-5e8d82f63138" containerName="registry-server" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.742321 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="79b48d22-a968-422f-8bbb-5e8d82f63138" containerName="registry-server" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.742419 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d84683d-c810-4ef4-bd1c-6b17ed4c135d" containerName="marketplace-operator" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.742432 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7" containerName="registry-server" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.742442 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="c78abb51-1399-44a2-8de8-16b060a40d50" containerName="registry-server" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.742451 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="79b48d22-a968-422f-8bbb-5e8d82f63138" containerName="registry-server" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.742471 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="76c222ad-bbdf-40e2-ba6a-d30820ca0a74" containerName="registry-server" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.742481 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="140e6a9b-4403-44d7-a0f3-39b6a96c7cd7" containerName="registry-server" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.743386 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pv99n" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.745993 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.751931 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3fb60644-2f82-4e25-8121-0a9898ec0aa9-catalog-content\") pod \"redhat-marketplace-pv99n\" (UID: \"3fb60644-2f82-4e25-8121-0a9898ec0aa9\") " pod="openshift-marketplace/redhat-marketplace-pv99n" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.751988 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3fb60644-2f82-4e25-8121-0a9898ec0aa9-utilities\") pod \"redhat-marketplace-pv99n\" (UID: \"3fb60644-2f82-4e25-8121-0a9898ec0aa9\") " pod="openshift-marketplace/redhat-marketplace-pv99n" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.752052 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ppkp7\" (UniqueName: \"kubernetes.io/projected/3fb60644-2f82-4e25-8121-0a9898ec0aa9-kube-api-access-ppkp7\") pod \"redhat-marketplace-pv99n\" (UID: \"3fb60644-2f82-4e25-8121-0a9898ec0aa9\") " pod="openshift-marketplace/redhat-marketplace-pv99n" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.757659 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pv99n"] Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.853482 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ppkp7\" (UniqueName: \"kubernetes.io/projected/3fb60644-2f82-4e25-8121-0a9898ec0aa9-kube-api-access-ppkp7\") pod \"redhat-marketplace-pv99n\" (UID: \"3fb60644-2f82-4e25-8121-0a9898ec0aa9\") " pod="openshift-marketplace/redhat-marketplace-pv99n" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.853607 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3fb60644-2f82-4e25-8121-0a9898ec0aa9-catalog-content\") pod \"redhat-marketplace-pv99n\" (UID: \"3fb60644-2f82-4e25-8121-0a9898ec0aa9\") " pod="openshift-marketplace/redhat-marketplace-pv99n" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.853713 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3fb60644-2f82-4e25-8121-0a9898ec0aa9-utilities\") pod \"redhat-marketplace-pv99n\" (UID: \"3fb60644-2f82-4e25-8121-0a9898ec0aa9\") " pod="openshift-marketplace/redhat-marketplace-pv99n" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.854847 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3fb60644-2f82-4e25-8121-0a9898ec0aa9-utilities\") pod \"redhat-marketplace-pv99n\" (UID: \"3fb60644-2f82-4e25-8121-0a9898ec0aa9\") " pod="openshift-marketplace/redhat-marketplace-pv99n" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.855113 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3fb60644-2f82-4e25-8121-0a9898ec0aa9-catalog-content\") pod \"redhat-marketplace-pv99n\" (UID: 
\"3fb60644-2f82-4e25-8121-0a9898ec0aa9\") " pod="openshift-marketplace/redhat-marketplace-pv99n" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.871980 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ppkp7\" (UniqueName: \"kubernetes.io/projected/3fb60644-2f82-4e25-8121-0a9898ec0aa9-kube-api-access-ppkp7\") pod \"redhat-marketplace-pv99n\" (UID: \"3fb60644-2f82-4e25-8121-0a9898ec0aa9\") " pod="openshift-marketplace/redhat-marketplace-pv99n" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.996109 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pxhvn" event={"ID":"140e6a9b-4403-44d7-a0f3-39b6a96c7cd7","Type":"ContainerDied","Data":"2296d703cbcac54bc6a5efe2b3613969dce6cc0eb138527da7ec06527bf8ae1d"} Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.996269 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dtm22" Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.996253 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dtm22" event={"ID":"76c222ad-bbdf-40e2-ba6a-d30820ca0a74","Type":"ContainerDied","Data":"b1385aba1351901b0011ecb05542b6d187939eed240bf2ef40c15f9ed3b3a78c"} Dec 03 08:55:54 crc kubenswrapper[4576]: I1203 08:55:54.998685 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pxhvn" Dec 03 08:55:55 crc kubenswrapper[4576]: I1203 08:55:55.002406 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-j26qz" Dec 03 08:55:55 crc kubenswrapper[4576]: I1203 08:55:55.061232 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pv99n" Dec 03 08:55:55 crc kubenswrapper[4576]: I1203 08:55:55.118484 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dtm22"] Dec 03 08:55:55 crc kubenswrapper[4576]: I1203 08:55:55.118854 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-dtm22"] Dec 03 08:55:55 crc kubenswrapper[4576]: I1203 08:55:55.141606 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-pxhvn"] Dec 03 08:55:55 crc kubenswrapper[4576]: I1203 08:55:55.142515 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-pxhvn"] Dec 03 08:55:55 crc kubenswrapper[4576]: I1203 08:55:55.304534 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pv99n"] Dec 03 08:55:55 crc kubenswrapper[4576]: W1203 08:55:55.312308 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3fb60644_2f82_4e25_8121_0a9898ec0aa9.slice/crio-2b724d039e4170a139802670d78792f81194dd592ae775cf2406c96eab6892dc WatchSource:0}: Error finding container 2b724d039e4170a139802670d78792f81194dd592ae775cf2406c96eab6892dc: Status 404 returned error can't find the container with id 2b724d039e4170a139802670d78792f81194dd592ae775cf2406c96eab6892dc Dec 03 08:55:55 crc kubenswrapper[4576]: I1203 08:55:55.346526 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-pgd6q"] Dec 03 08:55:55 crc kubenswrapper[4576]: I1203 08:55:55.347786 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pgd6q" Dec 03 08:55:55 crc kubenswrapper[4576]: I1203 08:55:55.351311 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 03 08:55:55 crc kubenswrapper[4576]: I1203 08:55:55.368872 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pgd6q"] Dec 03 08:55:55 crc kubenswrapper[4576]: I1203 08:55:55.465883 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df2f6436-e933-49ce-8bad-aaa35e1fe3ec-utilities\") pod \"redhat-operators-pgd6q\" (UID: \"df2f6436-e933-49ce-8bad-aaa35e1fe3ec\") " pod="openshift-marketplace/redhat-operators-pgd6q" Dec 03 08:55:55 crc kubenswrapper[4576]: I1203 08:55:55.465928 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df2f6436-e933-49ce-8bad-aaa35e1fe3ec-catalog-content\") pod \"redhat-operators-pgd6q\" (UID: \"df2f6436-e933-49ce-8bad-aaa35e1fe3ec\") " pod="openshift-marketplace/redhat-operators-pgd6q" Dec 03 08:55:55 crc kubenswrapper[4576]: I1203 08:55:55.465958 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sg58z\" (UniqueName: \"kubernetes.io/projected/df2f6436-e933-49ce-8bad-aaa35e1fe3ec-kube-api-access-sg58z\") pod \"redhat-operators-pgd6q\" (UID: \"df2f6436-e933-49ce-8bad-aaa35e1fe3ec\") " pod="openshift-marketplace/redhat-operators-pgd6q" Dec 03 08:55:55 crc kubenswrapper[4576]: I1203 08:55:55.567872 4576 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df2f6436-e933-49ce-8bad-aaa35e1fe3ec-utilities\") pod \"redhat-operators-pgd6q\" (UID: \"df2f6436-e933-49ce-8bad-aaa35e1fe3ec\") " pod="openshift-marketplace/redhat-operators-pgd6q" Dec 03 08:55:55 crc kubenswrapper[4576]: I1203 08:55:55.568355 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df2f6436-e933-49ce-8bad-aaa35e1fe3ec-catalog-content\") pod \"redhat-operators-pgd6q\" (UID: \"df2f6436-e933-49ce-8bad-aaa35e1fe3ec\") " pod="openshift-marketplace/redhat-operators-pgd6q" Dec 03 08:55:55 crc kubenswrapper[4576]: I1203 08:55:55.568392 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df2f6436-e933-49ce-8bad-aaa35e1fe3ec-utilities\") pod \"redhat-operators-pgd6q\" (UID: \"df2f6436-e933-49ce-8bad-aaa35e1fe3ec\") " pod="openshift-marketplace/redhat-operators-pgd6q" Dec 03 08:55:55 crc kubenswrapper[4576]: I1203 08:55:55.568418 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sg58z\" (UniqueName: \"kubernetes.io/projected/df2f6436-e933-49ce-8bad-aaa35e1fe3ec-kube-api-access-sg58z\") pod \"redhat-operators-pgd6q\" (UID: \"df2f6436-e933-49ce-8bad-aaa35e1fe3ec\") " pod="openshift-marketplace/redhat-operators-pgd6q" Dec 03 08:55:55 crc kubenswrapper[4576]: I1203 08:55:55.568626 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df2f6436-e933-49ce-8bad-aaa35e1fe3ec-catalog-content\") pod \"redhat-operators-pgd6q\" (UID: \"df2f6436-e933-49ce-8bad-aaa35e1fe3ec\") " pod="openshift-marketplace/redhat-operators-pgd6q" Dec 03 08:55:55 crc kubenswrapper[4576]: I1203 08:55:55.586682 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sg58z\" (UniqueName: \"kubernetes.io/projected/df2f6436-e933-49ce-8bad-aaa35e1fe3ec-kube-api-access-sg58z\") pod \"redhat-operators-pgd6q\" (UID: \"df2f6436-e933-49ce-8bad-aaa35e1fe3ec\") " pod="openshift-marketplace/redhat-operators-pgd6q" Dec 03 08:55:55 crc kubenswrapper[4576]: I1203 08:55:55.683987 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="140e6a9b-4403-44d7-a0f3-39b6a96c7cd7" path="/var/lib/kubelet/pods/140e6a9b-4403-44d7-a0f3-39b6a96c7cd7/volumes" Dec 03 08:55:55 crc kubenswrapper[4576]: I1203 08:55:55.685066 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d84683d-c810-4ef4-bd1c-6b17ed4c135d" path="/var/lib/kubelet/pods/1d84683d-c810-4ef4-bd1c-6b17ed4c135d/volumes" Dec 03 08:55:55 crc kubenswrapper[4576]: I1203 08:55:55.685531 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76c222ad-bbdf-40e2-ba6a-d30820ca0a74" path="/var/lib/kubelet/pods/76c222ad-bbdf-40e2-ba6a-d30820ca0a74/volumes" Dec 03 08:55:55 crc kubenswrapper[4576]: I1203 08:55:55.686650 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7" path="/var/lib/kubelet/pods/774a50aa-bb45-4e93-aab4-c0bc7ba9e1e7/volumes" Dec 03 08:55:55 crc kubenswrapper[4576]: I1203 08:55:55.687226 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="79b48d22-a968-422f-8bbb-5e8d82f63138" path="/var/lib/kubelet/pods/79b48d22-a968-422f-8bbb-5e8d82f63138/volumes" Dec 03 08:55:55 crc kubenswrapper[4576]: I1203 08:55:55.688199 4576 kubelet_volumes.go:163] "Cleaned up 
orphaned pod volumes dir" podUID="c78abb51-1399-44a2-8de8-16b060a40d50" path="/var/lib/kubelet/pods/c78abb51-1399-44a2-8de8-16b060a40d50/volumes" Dec 03 08:55:55 crc kubenswrapper[4576]: I1203 08:55:55.743852 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pgd6q" Dec 03 08:55:56 crc kubenswrapper[4576]: I1203 08:55:56.003038 4576 generic.go:334] "Generic (PLEG): container finished" podID="3fb60644-2f82-4e25-8121-0a9898ec0aa9" containerID="79560ce2c8f30a08613abfbeace2ee017499ef2eaa5759e08695a01862d32605" exitCode=0 Dec 03 08:55:56 crc kubenswrapper[4576]: I1203 08:55:56.003101 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pv99n" event={"ID":"3fb60644-2f82-4e25-8121-0a9898ec0aa9","Type":"ContainerDied","Data":"79560ce2c8f30a08613abfbeace2ee017499ef2eaa5759e08695a01862d32605"} Dec 03 08:55:56 crc kubenswrapper[4576]: I1203 08:55:56.003146 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pv99n" event={"ID":"3fb60644-2f82-4e25-8121-0a9898ec0aa9","Type":"ContainerStarted","Data":"2b724d039e4170a139802670d78792f81194dd592ae775cf2406c96eab6892dc"} Dec 03 08:55:56 crc kubenswrapper[4576]: I1203 08:55:56.187968 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pgd6q"] Dec 03 08:55:56 crc kubenswrapper[4576]: W1203 08:55:56.198696 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddf2f6436_e933_49ce_8bad_aaa35e1fe3ec.slice/crio-3f03b4a94b24854614229489d110c470d5a9ca0c6f4407903013a3359ac7fbe8 WatchSource:0}: Error finding container 3f03b4a94b24854614229489d110c470d5a9ca0c6f4407903013a3359ac7fbe8: Status 404 returned error can't find the container with id 3f03b4a94b24854614229489d110c470d5a9ca0c6f4407903013a3359ac7fbe8 Dec 03 08:55:56 crc kubenswrapper[4576]: E1203 08:55:56.497639 4576 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1d84683d_c810_4ef4_bd1c_6b17ed4c135d.slice/crio-e0a08dc04ef42d40f28e95fcb0a4ee404cdf83f64bec281623f031e678fe95e6\": RecentStats: unable to find data in memory cache]" Dec 03 08:55:57 crc kubenswrapper[4576]: I1203 08:55:57.012726 4576 generic.go:334] "Generic (PLEG): container finished" podID="df2f6436-e933-49ce-8bad-aaa35e1fe3ec" containerID="a4ca733aec785da18a51fe6f120ff778f299d01fbe443ad98210ac9a46bdc0b4" exitCode=0 Dec 03 08:55:57 crc kubenswrapper[4576]: I1203 08:55:57.012774 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pgd6q" event={"ID":"df2f6436-e933-49ce-8bad-aaa35e1fe3ec","Type":"ContainerDied","Data":"a4ca733aec785da18a51fe6f120ff778f299d01fbe443ad98210ac9a46bdc0b4"} Dec 03 08:55:57 crc kubenswrapper[4576]: I1203 08:55:57.013084 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pgd6q" event={"ID":"df2f6436-e933-49ce-8bad-aaa35e1fe3ec","Type":"ContainerStarted","Data":"3f03b4a94b24854614229489d110c470d5a9ca0c6f4407903013a3359ac7fbe8"} Dec 03 08:55:57 crc kubenswrapper[4576]: I1203 08:55:57.016390 4576 generic.go:334] "Generic (PLEG): container finished" podID="3fb60644-2f82-4e25-8121-0a9898ec0aa9" containerID="c84bec8347c67dfc488cac1ac162c70633ba8a994e70ce2040d2befcda4dd524" exitCode=0 Dec 03 08:55:57 crc kubenswrapper[4576]: I1203 
08:55:57.016449 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pv99n" event={"ID":"3fb60644-2f82-4e25-8121-0a9898ec0aa9","Type":"ContainerDied","Data":"c84bec8347c67dfc488cac1ac162c70633ba8a994e70ce2040d2befcda4dd524"} Dec 03 08:55:57 crc kubenswrapper[4576]: I1203 08:55:57.147127 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-tt6tx"] Dec 03 08:55:57 crc kubenswrapper[4576]: I1203 08:55:57.148351 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tt6tx" Dec 03 08:55:57 crc kubenswrapper[4576]: I1203 08:55:57.151130 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 03 08:55:57 crc kubenswrapper[4576]: I1203 08:55:57.165650 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tt6tx"] Dec 03 08:55:57 crc kubenswrapper[4576]: I1203 08:55:57.187724 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fv5z4\" (UniqueName: \"kubernetes.io/projected/542eeeaf-465f-4cd8-a64d-32fde07cf5e3-kube-api-access-fv5z4\") pod \"certified-operators-tt6tx\" (UID: \"542eeeaf-465f-4cd8-a64d-32fde07cf5e3\") " pod="openshift-marketplace/certified-operators-tt6tx" Dec 03 08:55:57 crc kubenswrapper[4576]: I1203 08:55:57.187781 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/542eeeaf-465f-4cd8-a64d-32fde07cf5e3-utilities\") pod \"certified-operators-tt6tx\" (UID: \"542eeeaf-465f-4cd8-a64d-32fde07cf5e3\") " pod="openshift-marketplace/certified-operators-tt6tx" Dec 03 08:55:57 crc kubenswrapper[4576]: I1203 08:55:57.187949 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/542eeeaf-465f-4cd8-a64d-32fde07cf5e3-catalog-content\") pod \"certified-operators-tt6tx\" (UID: \"542eeeaf-465f-4cd8-a64d-32fde07cf5e3\") " pod="openshift-marketplace/certified-operators-tt6tx" Dec 03 08:55:57 crc kubenswrapper[4576]: I1203 08:55:57.288827 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fv5z4\" (UniqueName: \"kubernetes.io/projected/542eeeaf-465f-4cd8-a64d-32fde07cf5e3-kube-api-access-fv5z4\") pod \"certified-operators-tt6tx\" (UID: \"542eeeaf-465f-4cd8-a64d-32fde07cf5e3\") " pod="openshift-marketplace/certified-operators-tt6tx" Dec 03 08:55:57 crc kubenswrapper[4576]: I1203 08:55:57.288913 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/542eeeaf-465f-4cd8-a64d-32fde07cf5e3-utilities\") pod \"certified-operators-tt6tx\" (UID: \"542eeeaf-465f-4cd8-a64d-32fde07cf5e3\") " pod="openshift-marketplace/certified-operators-tt6tx" Dec 03 08:55:57 crc kubenswrapper[4576]: I1203 08:55:57.288993 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/542eeeaf-465f-4cd8-a64d-32fde07cf5e3-catalog-content\") pod \"certified-operators-tt6tx\" (UID: \"542eeeaf-465f-4cd8-a64d-32fde07cf5e3\") " pod="openshift-marketplace/certified-operators-tt6tx" Dec 03 08:55:57 crc kubenswrapper[4576]: I1203 08:55:57.289455 4576 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/542eeeaf-465f-4cd8-a64d-32fde07cf5e3-utilities\") pod \"certified-operators-tt6tx\" (UID: \"542eeeaf-465f-4cd8-a64d-32fde07cf5e3\") " pod="openshift-marketplace/certified-operators-tt6tx" Dec 03 08:55:57 crc kubenswrapper[4576]: I1203 08:55:57.289671 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/542eeeaf-465f-4cd8-a64d-32fde07cf5e3-catalog-content\") pod \"certified-operators-tt6tx\" (UID: \"542eeeaf-465f-4cd8-a64d-32fde07cf5e3\") " pod="openshift-marketplace/certified-operators-tt6tx" Dec 03 08:55:57 crc kubenswrapper[4576]: I1203 08:55:57.312910 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fv5z4\" (UniqueName: \"kubernetes.io/projected/542eeeaf-465f-4cd8-a64d-32fde07cf5e3-kube-api-access-fv5z4\") pod \"certified-operators-tt6tx\" (UID: \"542eeeaf-465f-4cd8-a64d-32fde07cf5e3\") " pod="openshift-marketplace/certified-operators-tt6tx" Dec 03 08:55:57 crc kubenswrapper[4576]: I1203 08:55:57.463752 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tt6tx" Dec 03 08:55:57 crc kubenswrapper[4576]: I1203 08:55:57.749614 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-wgbb8"] Dec 03 08:55:57 crc kubenswrapper[4576]: I1203 08:55:57.751353 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wgbb8" Dec 03 08:55:57 crc kubenswrapper[4576]: I1203 08:55:57.753306 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 03 08:55:57 crc kubenswrapper[4576]: I1203 08:55:57.757958 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wgbb8"] Dec 03 08:55:57 crc kubenswrapper[4576]: I1203 08:55:57.896198 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/576695bd-064a-4fc2-8aa2-ba863892d1bb-catalog-content\") pod \"community-operators-wgbb8\" (UID: \"576695bd-064a-4fc2-8aa2-ba863892d1bb\") " pod="openshift-marketplace/community-operators-wgbb8" Dec 03 08:55:57 crc kubenswrapper[4576]: I1203 08:55:57.896263 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-prlff\" (UniqueName: \"kubernetes.io/projected/576695bd-064a-4fc2-8aa2-ba863892d1bb-kube-api-access-prlff\") pod \"community-operators-wgbb8\" (UID: \"576695bd-064a-4fc2-8aa2-ba863892d1bb\") " pod="openshift-marketplace/community-operators-wgbb8" Dec 03 08:55:57 crc kubenswrapper[4576]: I1203 08:55:57.896355 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/576695bd-064a-4fc2-8aa2-ba863892d1bb-utilities\") pod \"community-operators-wgbb8\" (UID: \"576695bd-064a-4fc2-8aa2-ba863892d1bb\") " pod="openshift-marketplace/community-operators-wgbb8" Dec 03 08:55:57 crc kubenswrapper[4576]: I1203 08:55:57.910960 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tt6tx"] Dec 03 08:55:57 crc kubenswrapper[4576]: W1203 08:55:57.914570 4576 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod542eeeaf_465f_4cd8_a64d_32fde07cf5e3.slice/crio-f99a1ea7119f5e1f14a49b74d8a633ab2c94bc8b2646d0e52e496ff473d689a3 WatchSource:0}: Error finding container f99a1ea7119f5e1f14a49b74d8a633ab2c94bc8b2646d0e52e496ff473d689a3: Status 404 returned error can't find the container with id f99a1ea7119f5e1f14a49b74d8a633ab2c94bc8b2646d0e52e496ff473d689a3 Dec 03 08:55:57 crc kubenswrapper[4576]: I1203 08:55:57.997060 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/576695bd-064a-4fc2-8aa2-ba863892d1bb-catalog-content\") pod \"community-operators-wgbb8\" (UID: \"576695bd-064a-4fc2-8aa2-ba863892d1bb\") " pod="openshift-marketplace/community-operators-wgbb8" Dec 03 08:55:57 crc kubenswrapper[4576]: I1203 08:55:57.997108 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-prlff\" (UniqueName: \"kubernetes.io/projected/576695bd-064a-4fc2-8aa2-ba863892d1bb-kube-api-access-prlff\") pod \"community-operators-wgbb8\" (UID: \"576695bd-064a-4fc2-8aa2-ba863892d1bb\") " pod="openshift-marketplace/community-operators-wgbb8" Dec 03 08:55:57 crc kubenswrapper[4576]: I1203 08:55:57.997162 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/576695bd-064a-4fc2-8aa2-ba863892d1bb-utilities\") pod \"community-operators-wgbb8\" (UID: \"576695bd-064a-4fc2-8aa2-ba863892d1bb\") " pod="openshift-marketplace/community-operators-wgbb8" Dec 03 08:55:57 crc kubenswrapper[4576]: I1203 08:55:57.997592 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/576695bd-064a-4fc2-8aa2-ba863892d1bb-utilities\") pod \"community-operators-wgbb8\" (UID: \"576695bd-064a-4fc2-8aa2-ba863892d1bb\") " pod="openshift-marketplace/community-operators-wgbb8" Dec 03 08:55:57 crc kubenswrapper[4576]: I1203 08:55:57.997931 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/576695bd-064a-4fc2-8aa2-ba863892d1bb-catalog-content\") pod \"community-operators-wgbb8\" (UID: \"576695bd-064a-4fc2-8aa2-ba863892d1bb\") " pod="openshift-marketplace/community-operators-wgbb8" Dec 03 08:55:58 crc kubenswrapper[4576]: I1203 08:55:58.024245 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-prlff\" (UniqueName: \"kubernetes.io/projected/576695bd-064a-4fc2-8aa2-ba863892d1bb-kube-api-access-prlff\") pod \"community-operators-wgbb8\" (UID: \"576695bd-064a-4fc2-8aa2-ba863892d1bb\") " pod="openshift-marketplace/community-operators-wgbb8" Dec 03 08:55:58 crc kubenswrapper[4576]: I1203 08:55:58.027019 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tt6tx" event={"ID":"542eeeaf-465f-4cd8-a64d-32fde07cf5e3","Type":"ContainerStarted","Data":"f99a1ea7119f5e1f14a49b74d8a633ab2c94bc8b2646d0e52e496ff473d689a3"} Dec 03 08:55:58 crc kubenswrapper[4576]: I1203 08:55:58.030053 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pv99n" event={"ID":"3fb60644-2f82-4e25-8121-0a9898ec0aa9","Type":"ContainerStarted","Data":"1bf1d2581716d16157907c822cfc89bd3d450395daf04654fa1dddfbb76002a6"} Dec 03 08:55:58 crc kubenswrapper[4576]: I1203 08:55:58.035171 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-pgd6q" event={"ID":"df2f6436-e933-49ce-8bad-aaa35e1fe3ec","Type":"ContainerStarted","Data":"37f8686239a64087a0f9176fdcc1da093e00abee9a52868a94f764ecd53d399e"} Dec 03 08:55:58 crc kubenswrapper[4576]: I1203 08:55:58.054468 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-pv99n" podStartSLOduration=2.613198673 podStartE2EDuration="4.054445199s" podCreationTimestamp="2025-12-03 08:55:54 +0000 UTC" firstStartedPulling="2025-12-03 08:55:56.004503694 +0000 UTC m=+963.390480678" lastFinishedPulling="2025-12-03 08:55:57.44575022 +0000 UTC m=+964.831727204" observedRunningTime="2025-12-03 08:55:58.053442242 +0000 UTC m=+965.439419236" watchObservedRunningTime="2025-12-03 08:55:58.054445199 +0000 UTC m=+965.440422193" Dec 03 08:55:58 crc kubenswrapper[4576]: I1203 08:55:58.078001 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wgbb8" Dec 03 08:55:58 crc kubenswrapper[4576]: I1203 08:55:58.325689 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wgbb8"] Dec 03 08:55:59 crc kubenswrapper[4576]: I1203 08:55:59.043483 4576 generic.go:334] "Generic (PLEG): container finished" podID="df2f6436-e933-49ce-8bad-aaa35e1fe3ec" containerID="37f8686239a64087a0f9176fdcc1da093e00abee9a52868a94f764ecd53d399e" exitCode=0 Dec 03 08:55:59 crc kubenswrapper[4576]: I1203 08:55:59.043710 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pgd6q" event={"ID":"df2f6436-e933-49ce-8bad-aaa35e1fe3ec","Type":"ContainerDied","Data":"37f8686239a64087a0f9176fdcc1da093e00abee9a52868a94f764ecd53d399e"} Dec 03 08:55:59 crc kubenswrapper[4576]: I1203 08:55:59.045273 4576 generic.go:334] "Generic (PLEG): container finished" podID="576695bd-064a-4fc2-8aa2-ba863892d1bb" containerID="7e0a56370751f23bcf0f61d8ebc2b9ff8bc6b0babdafabc92eebfced939860b1" exitCode=0 Dec 03 08:55:59 crc kubenswrapper[4576]: I1203 08:55:59.045326 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wgbb8" event={"ID":"576695bd-064a-4fc2-8aa2-ba863892d1bb","Type":"ContainerDied","Data":"7e0a56370751f23bcf0f61d8ebc2b9ff8bc6b0babdafabc92eebfced939860b1"} Dec 03 08:55:59 crc kubenswrapper[4576]: I1203 08:55:59.045347 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wgbb8" event={"ID":"576695bd-064a-4fc2-8aa2-ba863892d1bb","Type":"ContainerStarted","Data":"8a9c08c7a4e86a93f0898fe038d4524ed342864cef7ccaeb2433911d810d1545"} Dec 03 08:55:59 crc kubenswrapper[4576]: I1203 08:55:59.048177 4576 generic.go:334] "Generic (PLEG): container finished" podID="542eeeaf-465f-4cd8-a64d-32fde07cf5e3" containerID="a32b0e0ed1a60ff33f05600ad851b13e7bd51c804643060494d2d71929c20776" exitCode=0 Dec 03 08:55:59 crc kubenswrapper[4576]: I1203 08:55:59.048270 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tt6tx" event={"ID":"542eeeaf-465f-4cd8-a64d-32fde07cf5e3","Type":"ContainerDied","Data":"a32b0e0ed1a60ff33f05600ad851b13e7bd51c804643060494d2d71929c20776"} Dec 03 08:56:00 crc kubenswrapper[4576]: I1203 08:56:00.064117 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pgd6q" 
event={"ID":"df2f6436-e933-49ce-8bad-aaa35e1fe3ec","Type":"ContainerStarted","Data":"6e8ed77a9a73f2f5fcb65cd488eeed31f6095ac23eec812f2e48face160c7aa3"} Dec 03 08:56:00 crc kubenswrapper[4576]: I1203 08:56:00.066448 4576 generic.go:334] "Generic (PLEG): container finished" podID="576695bd-064a-4fc2-8aa2-ba863892d1bb" containerID="b871808193b22c568a81ba7ab5c349751b42872a7c859e98a1c8fe8950e4cc27" exitCode=0 Dec 03 08:56:00 crc kubenswrapper[4576]: I1203 08:56:00.066495 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wgbb8" event={"ID":"576695bd-064a-4fc2-8aa2-ba863892d1bb","Type":"ContainerDied","Data":"b871808193b22c568a81ba7ab5c349751b42872a7c859e98a1c8fe8950e4cc27"} Dec 03 08:56:00 crc kubenswrapper[4576]: I1203 08:56:00.070038 4576 generic.go:334] "Generic (PLEG): container finished" podID="542eeeaf-465f-4cd8-a64d-32fde07cf5e3" containerID="5414a29088f21b18e33980c8352be8c9a5a75160861bc35a3b934d7e490445b2" exitCode=0 Dec 03 08:56:00 crc kubenswrapper[4576]: I1203 08:56:00.070071 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tt6tx" event={"ID":"542eeeaf-465f-4cd8-a64d-32fde07cf5e3","Type":"ContainerDied","Data":"5414a29088f21b18e33980c8352be8c9a5a75160861bc35a3b934d7e490445b2"} Dec 03 08:56:00 crc kubenswrapper[4576]: I1203 08:56:00.086595 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-pgd6q" podStartSLOduration=2.53676554 podStartE2EDuration="5.086502399s" podCreationTimestamp="2025-12-03 08:55:55 +0000 UTC" firstStartedPulling="2025-12-03 08:55:57.014837454 +0000 UTC m=+964.400814438" lastFinishedPulling="2025-12-03 08:55:59.564574303 +0000 UTC m=+966.950551297" observedRunningTime="2025-12-03 08:56:00.081393638 +0000 UTC m=+967.467370632" watchObservedRunningTime="2025-12-03 08:56:00.086502399 +0000 UTC m=+967.472479383" Dec 03 08:56:01 crc kubenswrapper[4576]: I1203 08:56:01.076754 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wgbb8" event={"ID":"576695bd-064a-4fc2-8aa2-ba863892d1bb","Type":"ContainerStarted","Data":"48e8e3a6ff971a4115f0ec83be016f750c6f9cd6d5a197a7a49ee21bcd4aea7d"} Dec 03 08:56:01 crc kubenswrapper[4576]: I1203 08:56:01.080205 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tt6tx" event={"ID":"542eeeaf-465f-4cd8-a64d-32fde07cf5e3","Type":"ContainerStarted","Data":"fe92901014a72e8a6e7814d394d8bc9bbb754cfb9f73bdf541d049e1333e369b"} Dec 03 08:56:01 crc kubenswrapper[4576]: I1203 08:56:01.093770 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-wgbb8" podStartSLOduration=2.6622987179999997 podStartE2EDuration="4.093751852s" podCreationTimestamp="2025-12-03 08:55:57 +0000 UTC" firstStartedPulling="2025-12-03 08:55:59.048115197 +0000 UTC m=+966.434092211" lastFinishedPulling="2025-12-03 08:56:00.479568361 +0000 UTC m=+967.865545345" observedRunningTime="2025-12-03 08:56:01.091335315 +0000 UTC m=+968.477312299" watchObservedRunningTime="2025-12-03 08:56:01.093751852 +0000 UTC m=+968.479728836" Dec 03 08:56:05 crc kubenswrapper[4576]: I1203 08:56:05.062392 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-pv99n" Dec 03 08:56:05 crc kubenswrapper[4576]: I1203 08:56:05.062448 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/redhat-marketplace-pv99n" Dec 03 08:56:05 crc kubenswrapper[4576]: I1203 08:56:05.110324 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-pv99n" Dec 03 08:56:05 crc kubenswrapper[4576]: I1203 08:56:05.129168 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-tt6tx" podStartSLOduration=6.72612842 podStartE2EDuration="8.129095328s" podCreationTimestamp="2025-12-03 08:55:57 +0000 UTC" firstStartedPulling="2025-12-03 08:55:59.049862245 +0000 UTC m=+966.435839229" lastFinishedPulling="2025-12-03 08:56:00.452829153 +0000 UTC m=+967.838806137" observedRunningTime="2025-12-03 08:56:01.113123326 +0000 UTC m=+968.499100310" watchObservedRunningTime="2025-12-03 08:56:05.129095328 +0000 UTC m=+972.515072312" Dec 03 08:56:05 crc kubenswrapper[4576]: I1203 08:56:05.157723 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-pv99n" Dec 03 08:56:05 crc kubenswrapper[4576]: I1203 08:56:05.744775 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-pgd6q" Dec 03 08:56:05 crc kubenswrapper[4576]: I1203 08:56:05.744820 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-pgd6q" Dec 03 08:56:05 crc kubenswrapper[4576]: I1203 08:56:05.781075 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-pgd6q" Dec 03 08:56:06 crc kubenswrapper[4576]: I1203 08:56:06.146629 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-pgd6q" Dec 03 08:56:06 crc kubenswrapper[4576]: E1203 08:56:06.615452 4576 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1d84683d_c810_4ef4_bd1c_6b17ed4c135d.slice/crio-e0a08dc04ef42d40f28e95fcb0a4ee404cdf83f64bec281623f031e678fe95e6\": RecentStats: unable to find data in memory cache]" Dec 03 08:56:07 crc kubenswrapper[4576]: I1203 08:56:07.464733 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-tt6tx" Dec 03 08:56:07 crc kubenswrapper[4576]: I1203 08:56:07.465741 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-tt6tx" Dec 03 08:56:07 crc kubenswrapper[4576]: I1203 08:56:07.517341 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-tt6tx" Dec 03 08:56:08 crc kubenswrapper[4576]: I1203 08:56:08.078804 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-wgbb8" Dec 03 08:56:08 crc kubenswrapper[4576]: I1203 08:56:08.079581 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-wgbb8" Dec 03 08:56:08 crc kubenswrapper[4576]: I1203 08:56:08.127386 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-wgbb8" Dec 03 08:56:08 crc kubenswrapper[4576]: I1203 08:56:08.178224 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-tt6tx" Dec 
03 08:56:09 crc kubenswrapper[4576]: I1203 08:56:09.177012 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-wgbb8" Dec 03 08:56:09 crc kubenswrapper[4576]: I1203 08:56:09.681890 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:56:09 crc kubenswrapper[4576]: I1203 08:56:09.682221 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:56:16 crc kubenswrapper[4576]: E1203 08:56:16.748305 4576 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1d84683d_c810_4ef4_bd1c_6b17ed4c135d.slice/crio-e0a08dc04ef42d40f28e95fcb0a4ee404cdf83f64bec281623f031e678fe95e6\": RecentStats: unable to find data in memory cache]" Dec 03 08:56:26 crc kubenswrapper[4576]: E1203 08:56:26.910790 4576 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1d84683d_c810_4ef4_bd1c_6b17ed4c135d.slice/crio-e0a08dc04ef42d40f28e95fcb0a4ee404cdf83f64bec281623f031e678fe95e6\": RecentStats: unable to find data in memory cache]" Dec 03 08:56:37 crc kubenswrapper[4576]: E1203 08:56:37.056758 4576 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1d84683d_c810_4ef4_bd1c_6b17ed4c135d.slice/crio-e0a08dc04ef42d40f28e95fcb0a4ee404cdf83f64bec281623f031e678fe95e6\": RecentStats: unable to find data in memory cache]" Dec 03 08:56:39 crc kubenswrapper[4576]: I1203 08:56:39.681242 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:56:39 crc kubenswrapper[4576]: I1203 08:56:39.682972 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:56:39 crc kubenswrapper[4576]: I1203 08:56:39.686215 4576 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" Dec 03 08:56:39 crc kubenswrapper[4576]: I1203 08:56:39.687242 4576 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8091374a757ed299d6fe7bbe393cc71b6502384be96d17f3905c6b2f7d07c653"} pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 
08:56:39 crc kubenswrapper[4576]: I1203 08:56:39.687427 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" containerID="cri-o://8091374a757ed299d6fe7bbe393cc71b6502384be96d17f3905c6b2f7d07c653" gracePeriod=600 Dec 03 08:56:40 crc kubenswrapper[4576]: I1203 08:56:40.332250 4576 generic.go:334] "Generic (PLEG): container finished" podID="60b1bede-26e9-4b5d-b450-9866da685693" containerID="8091374a757ed299d6fe7bbe393cc71b6502384be96d17f3905c6b2f7d07c653" exitCode=0 Dec 03 08:56:40 crc kubenswrapper[4576]: I1203 08:56:40.332414 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" event={"ID":"60b1bede-26e9-4b5d-b450-9866da685693","Type":"ContainerDied","Data":"8091374a757ed299d6fe7bbe393cc71b6502384be96d17f3905c6b2f7d07c653"} Dec 03 08:56:40 crc kubenswrapper[4576]: I1203 08:56:40.332773 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" event={"ID":"60b1bede-26e9-4b5d-b450-9866da685693","Type":"ContainerStarted","Data":"ca1914312a3c325798ab3b1da6e9a61258797717812f29a3fbe79ba8b8c81a9e"} Dec 03 08:56:40 crc kubenswrapper[4576]: I1203 08:56:40.332972 4576 scope.go:117] "RemoveContainer" containerID="8d1bf2a31f9e2916f2f5be511327feda17a24fd71eb7831594cece38f7fce570" Dec 03 08:56:47 crc kubenswrapper[4576]: E1203 08:56:47.210576 4576 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1d84683d_c810_4ef4_bd1c_6b17ed4c135d.slice/crio-e0a08dc04ef42d40f28e95fcb0a4ee404cdf83f64bec281623f031e678fe95e6\": RecentStats: unable to find data in memory cache]" Dec 03 08:58:39 crc kubenswrapper[4576]: I1203 08:58:39.680954 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:58:39 crc kubenswrapper[4576]: I1203 08:58:39.682655 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:59:09 crc kubenswrapper[4576]: I1203 08:59:09.681634 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:59:09 crc kubenswrapper[4576]: I1203 08:59:09.682230 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:59:39 crc kubenswrapper[4576]: I1203 08:59:39.681626 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d 
container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 08:59:39 crc kubenswrapper[4576]: I1203 08:59:39.683103 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 08:59:39 crc kubenswrapper[4576]: I1203 08:59:39.687368 4576 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" Dec 03 08:59:39 crc kubenswrapper[4576]: I1203 08:59:39.688477 4576 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ca1914312a3c325798ab3b1da6e9a61258797717812f29a3fbe79ba8b8c81a9e"} pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 08:59:39 crc kubenswrapper[4576]: I1203 08:59:39.688726 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" containerID="cri-o://ca1914312a3c325798ab3b1da6e9a61258797717812f29a3fbe79ba8b8c81a9e" gracePeriod=600 Dec 03 08:59:39 crc kubenswrapper[4576]: I1203 08:59:39.971423 4576 generic.go:334] "Generic (PLEG): container finished" podID="60b1bede-26e9-4b5d-b450-9866da685693" containerID="ca1914312a3c325798ab3b1da6e9a61258797717812f29a3fbe79ba8b8c81a9e" exitCode=0 Dec 03 08:59:39 crc kubenswrapper[4576]: I1203 08:59:39.971686 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" event={"ID":"60b1bede-26e9-4b5d-b450-9866da685693","Type":"ContainerDied","Data":"ca1914312a3c325798ab3b1da6e9a61258797717812f29a3fbe79ba8b8c81a9e"} Dec 03 08:59:39 crc kubenswrapper[4576]: I1203 08:59:39.971845 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" event={"ID":"60b1bede-26e9-4b5d-b450-9866da685693","Type":"ContainerStarted","Data":"d22e42ceade7a96f65671f9060e7f04e4a04dc671101869aefa6c525f8d96e1a"} Dec 03 08:59:39 crc kubenswrapper[4576]: I1203 08:59:39.971911 4576 scope.go:117] "RemoveContainer" containerID="8091374a757ed299d6fe7bbe393cc71b6502384be96d17f3905c6b2f7d07c653" Dec 03 09:00:00 crc kubenswrapper[4576]: I1203 09:00:00.155514 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412540-hchls"] Dec 03 09:00:00 crc kubenswrapper[4576]: I1203 09:00:00.159859 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412540-hchls" Dec 03 09:00:00 crc kubenswrapper[4576]: I1203 09:00:00.165254 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 03 09:00:00 crc kubenswrapper[4576]: I1203 09:00:00.165599 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 03 09:00:00 crc kubenswrapper[4576]: I1203 09:00:00.166786 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412540-hchls"] Dec 03 09:00:00 crc kubenswrapper[4576]: I1203 09:00:00.231693 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xf96h\" (UniqueName: \"kubernetes.io/projected/7234af20-614a-4fe5-adbb-68515a638bf9-kube-api-access-xf96h\") pod \"collect-profiles-29412540-hchls\" (UID: \"7234af20-614a-4fe5-adbb-68515a638bf9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412540-hchls" Dec 03 09:00:00 crc kubenswrapper[4576]: I1203 09:00:00.232045 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7234af20-614a-4fe5-adbb-68515a638bf9-config-volume\") pod \"collect-profiles-29412540-hchls\" (UID: \"7234af20-614a-4fe5-adbb-68515a638bf9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412540-hchls" Dec 03 09:00:00 crc kubenswrapper[4576]: I1203 09:00:00.232194 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7234af20-614a-4fe5-adbb-68515a638bf9-secret-volume\") pod \"collect-profiles-29412540-hchls\" (UID: \"7234af20-614a-4fe5-adbb-68515a638bf9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412540-hchls" Dec 03 09:00:00 crc kubenswrapper[4576]: I1203 09:00:00.334097 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7234af20-614a-4fe5-adbb-68515a638bf9-config-volume\") pod \"collect-profiles-29412540-hchls\" (UID: \"7234af20-614a-4fe5-adbb-68515a638bf9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412540-hchls" Dec 03 09:00:00 crc kubenswrapper[4576]: I1203 09:00:00.334162 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7234af20-614a-4fe5-adbb-68515a638bf9-secret-volume\") pod \"collect-profiles-29412540-hchls\" (UID: \"7234af20-614a-4fe5-adbb-68515a638bf9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412540-hchls" Dec 03 09:00:00 crc kubenswrapper[4576]: I1203 09:00:00.334193 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xf96h\" (UniqueName: \"kubernetes.io/projected/7234af20-614a-4fe5-adbb-68515a638bf9-kube-api-access-xf96h\") pod \"collect-profiles-29412540-hchls\" (UID: \"7234af20-614a-4fe5-adbb-68515a638bf9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412540-hchls" Dec 03 09:00:00 crc kubenswrapper[4576]: I1203 09:00:00.335545 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7234af20-614a-4fe5-adbb-68515a638bf9-config-volume\") pod 
\"collect-profiles-29412540-hchls\" (UID: \"7234af20-614a-4fe5-adbb-68515a638bf9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412540-hchls" Dec 03 09:00:00 crc kubenswrapper[4576]: I1203 09:00:00.344839 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7234af20-614a-4fe5-adbb-68515a638bf9-secret-volume\") pod \"collect-profiles-29412540-hchls\" (UID: \"7234af20-614a-4fe5-adbb-68515a638bf9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412540-hchls" Dec 03 09:00:00 crc kubenswrapper[4576]: I1203 09:00:00.352117 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xf96h\" (UniqueName: \"kubernetes.io/projected/7234af20-614a-4fe5-adbb-68515a638bf9-kube-api-access-xf96h\") pod \"collect-profiles-29412540-hchls\" (UID: \"7234af20-614a-4fe5-adbb-68515a638bf9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412540-hchls" Dec 03 09:00:00 crc kubenswrapper[4576]: I1203 09:00:00.484098 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412540-hchls" Dec 03 09:00:00 crc kubenswrapper[4576]: I1203 09:00:00.731075 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412540-hchls"] Dec 03 09:00:00 crc kubenswrapper[4576]: W1203 09:00:00.737197 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7234af20_614a_4fe5_adbb_68515a638bf9.slice/crio-a0e52843464402adff7ce5b187ccbabe38e3d400d332078b07df4ffaa43d4fa5 WatchSource:0}: Error finding container a0e52843464402adff7ce5b187ccbabe38e3d400d332078b07df4ffaa43d4fa5: Status 404 returned error can't find the container with id a0e52843464402adff7ce5b187ccbabe38e3d400d332078b07df4ffaa43d4fa5 Dec 03 09:00:01 crc kubenswrapper[4576]: I1203 09:00:01.129277 4576 generic.go:334] "Generic (PLEG): container finished" podID="7234af20-614a-4fe5-adbb-68515a638bf9" containerID="b31f24842a68cba54f0597872edb8d9c492f7cd6fc0c3b867eb9898f16c3171b" exitCode=0 Dec 03 09:00:01 crc kubenswrapper[4576]: I1203 09:00:01.129449 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412540-hchls" event={"ID":"7234af20-614a-4fe5-adbb-68515a638bf9","Type":"ContainerDied","Data":"b31f24842a68cba54f0597872edb8d9c492f7cd6fc0c3b867eb9898f16c3171b"} Dec 03 09:00:01 crc kubenswrapper[4576]: I1203 09:00:01.129645 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412540-hchls" event={"ID":"7234af20-614a-4fe5-adbb-68515a638bf9","Type":"ContainerStarted","Data":"a0e52843464402adff7ce5b187ccbabe38e3d400d332078b07df4ffaa43d4fa5"} Dec 03 09:00:02 crc kubenswrapper[4576]: I1203 09:00:02.353034 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412540-hchls" Dec 03 09:00:02 crc kubenswrapper[4576]: I1203 09:00:02.475597 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7234af20-614a-4fe5-adbb-68515a638bf9-config-volume\") pod \"7234af20-614a-4fe5-adbb-68515a638bf9\" (UID: \"7234af20-614a-4fe5-adbb-68515a638bf9\") " Dec 03 09:00:02 crc kubenswrapper[4576]: I1203 09:00:02.475766 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7234af20-614a-4fe5-adbb-68515a638bf9-secret-volume\") pod \"7234af20-614a-4fe5-adbb-68515a638bf9\" (UID: \"7234af20-614a-4fe5-adbb-68515a638bf9\") " Dec 03 09:00:02 crc kubenswrapper[4576]: I1203 09:00:02.475878 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xf96h\" (UniqueName: \"kubernetes.io/projected/7234af20-614a-4fe5-adbb-68515a638bf9-kube-api-access-xf96h\") pod \"7234af20-614a-4fe5-adbb-68515a638bf9\" (UID: \"7234af20-614a-4fe5-adbb-68515a638bf9\") " Dec 03 09:00:02 crc kubenswrapper[4576]: I1203 09:00:02.477042 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7234af20-614a-4fe5-adbb-68515a638bf9-config-volume" (OuterVolumeSpecName: "config-volume") pod "7234af20-614a-4fe5-adbb-68515a638bf9" (UID: "7234af20-614a-4fe5-adbb-68515a638bf9"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:00:02 crc kubenswrapper[4576]: I1203 09:00:02.483365 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7234af20-614a-4fe5-adbb-68515a638bf9-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "7234af20-614a-4fe5-adbb-68515a638bf9" (UID: "7234af20-614a-4fe5-adbb-68515a638bf9"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:00:02 crc kubenswrapper[4576]: I1203 09:00:02.483394 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7234af20-614a-4fe5-adbb-68515a638bf9-kube-api-access-xf96h" (OuterVolumeSpecName: "kube-api-access-xf96h") pod "7234af20-614a-4fe5-adbb-68515a638bf9" (UID: "7234af20-614a-4fe5-adbb-68515a638bf9"). InnerVolumeSpecName "kube-api-access-xf96h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:00:02 crc kubenswrapper[4576]: I1203 09:00:02.577870 4576 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7234af20-614a-4fe5-adbb-68515a638bf9-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 09:00:02 crc kubenswrapper[4576]: I1203 09:00:02.577927 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xf96h\" (UniqueName: \"kubernetes.io/projected/7234af20-614a-4fe5-adbb-68515a638bf9-kube-api-access-xf96h\") on node \"crc\" DevicePath \"\"" Dec 03 09:00:02 crc kubenswrapper[4576]: I1203 09:00:02.577943 4576 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7234af20-614a-4fe5-adbb-68515a638bf9-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 09:00:03 crc kubenswrapper[4576]: I1203 09:00:03.144884 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412540-hchls" event={"ID":"7234af20-614a-4fe5-adbb-68515a638bf9","Type":"ContainerDied","Data":"a0e52843464402adff7ce5b187ccbabe38e3d400d332078b07df4ffaa43d4fa5"} Dec 03 09:00:03 crc kubenswrapper[4576]: I1203 09:00:03.144991 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a0e52843464402adff7ce5b187ccbabe38e3d400d332078b07df4ffaa43d4fa5" Dec 03 09:00:03 crc kubenswrapper[4576]: I1203 09:00:03.145104 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412540-hchls" Dec 03 09:01:39 crc kubenswrapper[4576]: I1203 09:01:39.680911 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 09:01:39 crc kubenswrapper[4576]: I1203 09:01:39.681467 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 09:02:09 crc kubenswrapper[4576]: I1203 09:02:09.680870 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 09:02:09 crc kubenswrapper[4576]: I1203 09:02:09.681964 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 09:02:39 crc kubenswrapper[4576]: I1203 09:02:39.681226 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 09:02:39 crc kubenswrapper[4576]: I1203 
09:02:39.682100 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 09:02:39 crc kubenswrapper[4576]: I1203 09:02:39.691331 4576 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" Dec 03 09:02:39 crc kubenswrapper[4576]: I1203 09:02:39.692220 4576 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d22e42ceade7a96f65671f9060e7f04e4a04dc671101869aefa6c525f8d96e1a"} pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 09:02:39 crc kubenswrapper[4576]: I1203 09:02:39.692386 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" containerID="cri-o://d22e42ceade7a96f65671f9060e7f04e4a04dc671101869aefa6c525f8d96e1a" gracePeriod=600 Dec 03 09:02:40 crc kubenswrapper[4576]: I1203 09:02:40.288233 4576 generic.go:334] "Generic (PLEG): container finished" podID="60b1bede-26e9-4b5d-b450-9866da685693" containerID="d22e42ceade7a96f65671f9060e7f04e4a04dc671101869aefa6c525f8d96e1a" exitCode=0 Dec 03 09:02:40 crc kubenswrapper[4576]: I1203 09:02:40.288319 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" event={"ID":"60b1bede-26e9-4b5d-b450-9866da685693","Type":"ContainerDied","Data":"d22e42ceade7a96f65671f9060e7f04e4a04dc671101869aefa6c525f8d96e1a"} Dec 03 09:02:40 crc kubenswrapper[4576]: I1203 09:02:40.288490 4576 scope.go:117] "RemoveContainer" containerID="ca1914312a3c325798ab3b1da6e9a61258797717812f29a3fbe79ba8b8c81a9e" Dec 03 09:02:41 crc kubenswrapper[4576]: I1203 09:02:41.299031 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" event={"ID":"60b1bede-26e9-4b5d-b450-9866da685693","Type":"ContainerStarted","Data":"971057ff9d8dd948ea2baec3fb0fe13004ef1bdcb132acd7f101f25e8ebc2c91"} Dec 03 09:04:18 crc kubenswrapper[4576]: I1203 09:04:18.426711 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7"] Dec 03 09:04:18 crc kubenswrapper[4576]: E1203 09:04:18.427658 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7234af20-614a-4fe5-adbb-68515a638bf9" containerName="collect-profiles" Dec 03 09:04:18 crc kubenswrapper[4576]: I1203 09:04:18.427675 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="7234af20-614a-4fe5-adbb-68515a638bf9" containerName="collect-profiles" Dec 03 09:04:18 crc kubenswrapper[4576]: I1203 09:04:18.427824 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="7234af20-614a-4fe5-adbb-68515a638bf9" containerName="collect-profiles" Dec 03 09:04:18 crc kubenswrapper[4576]: I1203 09:04:18.428978 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7" Dec 03 09:04:18 crc kubenswrapper[4576]: I1203 09:04:18.431353 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 03 09:04:18 crc kubenswrapper[4576]: I1203 09:04:18.450834 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7"] Dec 03 09:04:18 crc kubenswrapper[4576]: I1203 09:04:18.492758 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65gn7\" (UniqueName: \"kubernetes.io/projected/afe8f623-5aa0-4830-8144-e8f207db1316-kube-api-access-65gn7\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7\" (UID: \"afe8f623-5aa0-4830-8144-e8f207db1316\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7" Dec 03 09:04:18 crc kubenswrapper[4576]: I1203 09:04:18.493047 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/afe8f623-5aa0-4830-8144-e8f207db1316-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7\" (UID: \"afe8f623-5aa0-4830-8144-e8f207db1316\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7" Dec 03 09:04:18 crc kubenswrapper[4576]: I1203 09:04:18.493156 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/afe8f623-5aa0-4830-8144-e8f207db1316-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7\" (UID: \"afe8f623-5aa0-4830-8144-e8f207db1316\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7" Dec 03 09:04:18 crc kubenswrapper[4576]: I1203 09:04:18.594700 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/afe8f623-5aa0-4830-8144-e8f207db1316-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7\" (UID: \"afe8f623-5aa0-4830-8144-e8f207db1316\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7" Dec 03 09:04:18 crc kubenswrapper[4576]: I1203 09:04:18.594841 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65gn7\" (UniqueName: \"kubernetes.io/projected/afe8f623-5aa0-4830-8144-e8f207db1316-kube-api-access-65gn7\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7\" (UID: \"afe8f623-5aa0-4830-8144-e8f207db1316\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7" Dec 03 09:04:18 crc kubenswrapper[4576]: I1203 09:04:18.594929 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/afe8f623-5aa0-4830-8144-e8f207db1316-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7\" (UID: \"afe8f623-5aa0-4830-8144-e8f207db1316\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7" Dec 03 09:04:18 crc kubenswrapper[4576]: I1203 09:04:18.595777 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/afe8f623-5aa0-4830-8144-e8f207db1316-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7\" (UID: \"afe8f623-5aa0-4830-8144-e8f207db1316\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7" Dec 03 09:04:18 crc kubenswrapper[4576]: I1203 09:04:18.595816 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/afe8f623-5aa0-4830-8144-e8f207db1316-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7\" (UID: \"afe8f623-5aa0-4830-8144-e8f207db1316\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7" Dec 03 09:04:18 crc kubenswrapper[4576]: I1203 09:04:18.626475 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65gn7\" (UniqueName: \"kubernetes.io/projected/afe8f623-5aa0-4830-8144-e8f207db1316-kube-api-access-65gn7\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7\" (UID: \"afe8f623-5aa0-4830-8144-e8f207db1316\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7" Dec 03 09:04:18 crc kubenswrapper[4576]: I1203 09:04:18.748197 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7" Dec 03 09:04:19 crc kubenswrapper[4576]: I1203 09:04:19.169007 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7"] Dec 03 09:04:19 crc kubenswrapper[4576]: I1203 09:04:19.944790 4576 generic.go:334] "Generic (PLEG): container finished" podID="afe8f623-5aa0-4830-8144-e8f207db1316" containerID="a208ddcd731cae444cd2b328b75c82e3811066b49dd1522a686cf3eeab0455e3" exitCode=0 Dec 03 09:04:19 crc kubenswrapper[4576]: I1203 09:04:19.944846 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7" event={"ID":"afe8f623-5aa0-4830-8144-e8f207db1316","Type":"ContainerDied","Data":"a208ddcd731cae444cd2b328b75c82e3811066b49dd1522a686cf3eeab0455e3"} Dec 03 09:04:19 crc kubenswrapper[4576]: I1203 09:04:19.945125 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7" event={"ID":"afe8f623-5aa0-4830-8144-e8f207db1316","Type":"ContainerStarted","Data":"3189c6e3d8e5f9124d05d4ec80ff30364cdb575fe4cb4dde6c242d7894331115"} Dec 03 09:04:19 crc kubenswrapper[4576]: I1203 09:04:19.947004 4576 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 09:04:21 crc kubenswrapper[4576]: I1203 09:04:21.960033 4576 generic.go:334] "Generic (PLEG): container finished" podID="afe8f623-5aa0-4830-8144-e8f207db1316" containerID="da04302cc2f2484558de04e5ae33458ef45883c3d7e90f7719376ac29977d7a3" exitCode=0 Dec 03 09:04:21 crc kubenswrapper[4576]: I1203 09:04:21.960685 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7" event={"ID":"afe8f623-5aa0-4830-8144-e8f207db1316","Type":"ContainerDied","Data":"da04302cc2f2484558de04e5ae33458ef45883c3d7e90f7719376ac29977d7a3"} Dec 03 09:04:22 crc kubenswrapper[4576]: I1203 09:04:22.969598 4576 generic.go:334] "Generic (PLEG): container finished" 
podID="afe8f623-5aa0-4830-8144-e8f207db1316" containerID="97d5a9e5ced387023a21dfbfe25e9225eb72a9418b1f6f6f7eab76f522246114" exitCode=0 Dec 03 09:04:22 crc kubenswrapper[4576]: I1203 09:04:22.969675 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7" event={"ID":"afe8f623-5aa0-4830-8144-e8f207db1316","Type":"ContainerDied","Data":"97d5a9e5ced387023a21dfbfe25e9225eb72a9418b1f6f6f7eab76f522246114"} Dec 03 09:04:24 crc kubenswrapper[4576]: I1203 09:04:24.211433 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7" Dec 03 09:04:24 crc kubenswrapper[4576]: I1203 09:04:24.283739 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/afe8f623-5aa0-4830-8144-e8f207db1316-bundle\") pod \"afe8f623-5aa0-4830-8144-e8f207db1316\" (UID: \"afe8f623-5aa0-4830-8144-e8f207db1316\") " Dec 03 09:04:24 crc kubenswrapper[4576]: I1203 09:04:24.283828 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/afe8f623-5aa0-4830-8144-e8f207db1316-util\") pod \"afe8f623-5aa0-4830-8144-e8f207db1316\" (UID: \"afe8f623-5aa0-4830-8144-e8f207db1316\") " Dec 03 09:04:24 crc kubenswrapper[4576]: I1203 09:04:24.283878 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-65gn7\" (UniqueName: \"kubernetes.io/projected/afe8f623-5aa0-4830-8144-e8f207db1316-kube-api-access-65gn7\") pod \"afe8f623-5aa0-4830-8144-e8f207db1316\" (UID: \"afe8f623-5aa0-4830-8144-e8f207db1316\") " Dec 03 09:04:24 crc kubenswrapper[4576]: I1203 09:04:24.284659 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/afe8f623-5aa0-4830-8144-e8f207db1316-bundle" (OuterVolumeSpecName: "bundle") pod "afe8f623-5aa0-4830-8144-e8f207db1316" (UID: "afe8f623-5aa0-4830-8144-e8f207db1316"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:04:24 crc kubenswrapper[4576]: I1203 09:04:24.289901 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/afe8f623-5aa0-4830-8144-e8f207db1316-kube-api-access-65gn7" (OuterVolumeSpecName: "kube-api-access-65gn7") pod "afe8f623-5aa0-4830-8144-e8f207db1316" (UID: "afe8f623-5aa0-4830-8144-e8f207db1316"). InnerVolumeSpecName "kube-api-access-65gn7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:04:24 crc kubenswrapper[4576]: I1203 09:04:24.305592 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/afe8f623-5aa0-4830-8144-e8f207db1316-util" (OuterVolumeSpecName: "util") pod "afe8f623-5aa0-4830-8144-e8f207db1316" (UID: "afe8f623-5aa0-4830-8144-e8f207db1316"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:04:24 crc kubenswrapper[4576]: I1203 09:04:24.385020 4576 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/afe8f623-5aa0-4830-8144-e8f207db1316-util\") on node \"crc\" DevicePath \"\"" Dec 03 09:04:24 crc kubenswrapper[4576]: I1203 09:04:24.385049 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-65gn7\" (UniqueName: \"kubernetes.io/projected/afe8f623-5aa0-4830-8144-e8f207db1316-kube-api-access-65gn7\") on node \"crc\" DevicePath \"\"" Dec 03 09:04:24 crc kubenswrapper[4576]: I1203 09:04:24.385059 4576 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/afe8f623-5aa0-4830-8144-e8f207db1316-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:04:24 crc kubenswrapper[4576]: I1203 09:04:24.982404 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7" event={"ID":"afe8f623-5aa0-4830-8144-e8f207db1316","Type":"ContainerDied","Data":"3189c6e3d8e5f9124d05d4ec80ff30364cdb575fe4cb4dde6c242d7894331115"} Dec 03 09:04:24 crc kubenswrapper[4576]: I1203 09:04:24.982476 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3189c6e3d8e5f9124d05d4ec80ff30364cdb575fe4cb4dde6c242d7894331115" Dec 03 09:04:24 crc kubenswrapper[4576]: I1203 09:04:24.982507 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7" Dec 03 09:04:25 crc kubenswrapper[4576]: I1203 09:04:25.984201 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-8ttll"] Dec 03 09:04:25 crc kubenswrapper[4576]: E1203 09:04:25.984465 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afe8f623-5aa0-4830-8144-e8f207db1316" containerName="pull" Dec 03 09:04:25 crc kubenswrapper[4576]: I1203 09:04:25.984481 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="afe8f623-5aa0-4830-8144-e8f207db1316" containerName="pull" Dec 03 09:04:25 crc kubenswrapper[4576]: E1203 09:04:25.984493 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afe8f623-5aa0-4830-8144-e8f207db1316" containerName="util" Dec 03 09:04:25 crc kubenswrapper[4576]: I1203 09:04:25.984501 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="afe8f623-5aa0-4830-8144-e8f207db1316" containerName="util" Dec 03 09:04:25 crc kubenswrapper[4576]: E1203 09:04:25.984519 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afe8f623-5aa0-4830-8144-e8f207db1316" containerName="extract" Dec 03 09:04:25 crc kubenswrapper[4576]: I1203 09:04:25.984549 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="afe8f623-5aa0-4830-8144-e8f207db1316" containerName="extract" Dec 03 09:04:25 crc kubenswrapper[4576]: I1203 09:04:25.984672 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="afe8f623-5aa0-4830-8144-e8f207db1316" containerName="extract" Dec 03 09:04:25 crc kubenswrapper[4576]: I1203 09:04:25.985182 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-8ttll" Dec 03 09:04:25 crc kubenswrapper[4576]: I1203 09:04:25.991633 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Dec 03 09:04:25 crc kubenswrapper[4576]: I1203 09:04:25.991857 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-wzx25" Dec 03 09:04:25 crc kubenswrapper[4576]: I1203 09:04:25.995056 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Dec 03 09:04:26 crc kubenswrapper[4576]: I1203 09:04:26.002258 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-8ttll"] Dec 03 09:04:26 crc kubenswrapper[4576]: I1203 09:04:26.105549 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nr8qt\" (UniqueName: \"kubernetes.io/projected/fee161aa-5f26-41be-ba50-4b06079f597e-kube-api-access-nr8qt\") pod \"nmstate-operator-5b5b58f5c8-8ttll\" (UID: \"fee161aa-5f26-41be-ba50-4b06079f597e\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-8ttll" Dec 03 09:04:26 crc kubenswrapper[4576]: I1203 09:04:26.206777 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nr8qt\" (UniqueName: \"kubernetes.io/projected/fee161aa-5f26-41be-ba50-4b06079f597e-kube-api-access-nr8qt\") pod \"nmstate-operator-5b5b58f5c8-8ttll\" (UID: \"fee161aa-5f26-41be-ba50-4b06079f597e\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-8ttll" Dec 03 09:04:26 crc kubenswrapper[4576]: I1203 09:04:26.229689 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nr8qt\" (UniqueName: \"kubernetes.io/projected/fee161aa-5f26-41be-ba50-4b06079f597e-kube-api-access-nr8qt\") pod \"nmstate-operator-5b5b58f5c8-8ttll\" (UID: \"fee161aa-5f26-41be-ba50-4b06079f597e\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-8ttll" Dec 03 09:04:26 crc kubenswrapper[4576]: I1203 09:04:26.299414 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-8ttll" Dec 03 09:04:26 crc kubenswrapper[4576]: I1203 09:04:26.522984 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-8ttll"] Dec 03 09:04:26 crc kubenswrapper[4576]: I1203 09:04:26.993776 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-8ttll" event={"ID":"fee161aa-5f26-41be-ba50-4b06079f597e","Type":"ContainerStarted","Data":"233d723910da5b5cf6a764a45f5aaa0fc6cbd73cdcba9d74890fc208796242a7"} Dec 03 09:04:29 crc kubenswrapper[4576]: I1203 09:04:29.003745 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-8ttll" event={"ID":"fee161aa-5f26-41be-ba50-4b06079f597e","Type":"ContainerStarted","Data":"58c46669100fbd9c63e576c07859518b8d52a8f2568c6553aaac649952ffee07"} Dec 03 09:04:29 crc kubenswrapper[4576]: I1203 09:04:29.026779 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-8ttll" podStartSLOduration=1.898132113 podStartE2EDuration="4.026726999s" podCreationTimestamp="2025-12-03 09:04:25 +0000 UTC" firstStartedPulling="2025-12-03 09:04:26.535390398 +0000 UTC m=+1473.921367382" lastFinishedPulling="2025-12-03 09:04:28.663985284 +0000 UTC m=+1476.049962268" observedRunningTime="2025-12-03 09:04:29.025052264 +0000 UTC m=+1476.411029248" watchObservedRunningTime="2025-12-03 09:04:29.026726999 +0000 UTC m=+1476.412703983" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.093900 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-h2g5z"] Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.095207 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-h2g5z" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.098870 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-nmw8h" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.110587 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-h2g5z"] Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.125811 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-889j7"] Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.126671 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-889j7" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.129767 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.130899 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-ll4ls"] Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.139685 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-ll4ls" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.155586 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tb4n6\" (UniqueName: \"kubernetes.io/projected/4811e0da-b69c-436c-a2f9-1796a35b69ee-kube-api-access-tb4n6\") pod \"nmstate-metrics-7f946cbc9-h2g5z\" (UID: \"4811e0da-b69c-436c-a2f9-1796a35b69ee\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-h2g5z" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.169470 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-889j7"] Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.256953 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/263f185a-e858-45d0-a61c-453056d0a98c-dbus-socket\") pod \"nmstate-handler-ll4ls\" (UID: \"263f185a-e858-45d0-a61c-453056d0a98c\") " pod="openshift-nmstate/nmstate-handler-ll4ls" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.257000 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/208539d3-2c0f-4889-9239-c3dddd20ad3b-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-889j7\" (UID: \"208539d3-2c0f-4889-9239-c3dddd20ad3b\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-889j7" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.257070 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/263f185a-e858-45d0-a61c-453056d0a98c-ovs-socket\") pod \"nmstate-handler-ll4ls\" (UID: \"263f185a-e858-45d0-a61c-453056d0a98c\") " pod="openshift-nmstate/nmstate-handler-ll4ls" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.257161 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tb4n6\" (UniqueName: \"kubernetes.io/projected/4811e0da-b69c-436c-a2f9-1796a35b69ee-kube-api-access-tb4n6\") pod \"nmstate-metrics-7f946cbc9-h2g5z\" (UID: \"4811e0da-b69c-436c-a2f9-1796a35b69ee\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-h2g5z" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.257203 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pqmhr\" (UniqueName: \"kubernetes.io/projected/263f185a-e858-45d0-a61c-453056d0a98c-kube-api-access-pqmhr\") pod \"nmstate-handler-ll4ls\" (UID: \"263f185a-e858-45d0-a61c-453056d0a98c\") " pod="openshift-nmstate/nmstate-handler-ll4ls" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.257241 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/263f185a-e858-45d0-a61c-453056d0a98c-nmstate-lock\") pod \"nmstate-handler-ll4ls\" (UID: \"263f185a-e858-45d0-a61c-453056d0a98c\") " pod="openshift-nmstate/nmstate-handler-ll4ls" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.257320 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8m8dk\" (UniqueName: \"kubernetes.io/projected/208539d3-2c0f-4889-9239-c3dddd20ad3b-kube-api-access-8m8dk\") pod \"nmstate-webhook-5f6d4c5ccb-889j7\" (UID: \"208539d3-2c0f-4889-9239-c3dddd20ad3b\") " 
pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-889j7" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.271547 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-rfj5d"] Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.272387 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-rfj5d" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.277644 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.277917 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.278194 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-49j6x" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.296868 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-rfj5d"] Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.312454 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tb4n6\" (UniqueName: \"kubernetes.io/projected/4811e0da-b69c-436c-a2f9-1796a35b69ee-kube-api-access-tb4n6\") pod \"nmstate-metrics-7f946cbc9-h2g5z\" (UID: \"4811e0da-b69c-436c-a2f9-1796a35b69ee\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-h2g5z" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.358256 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/263f185a-e858-45d0-a61c-453056d0a98c-nmstate-lock\") pod \"nmstate-handler-ll4ls\" (UID: \"263f185a-e858-45d0-a61c-453056d0a98c\") " pod="openshift-nmstate/nmstate-handler-ll4ls" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.358315 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8m8dk\" (UniqueName: \"kubernetes.io/projected/208539d3-2c0f-4889-9239-c3dddd20ad3b-kube-api-access-8m8dk\") pod \"nmstate-webhook-5f6d4c5ccb-889j7\" (UID: \"208539d3-2c0f-4889-9239-c3dddd20ad3b\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-889j7" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.358349 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5z7tc\" (UniqueName: \"kubernetes.io/projected/cb5b59ce-56be-4b48-902a-902dc9e7a707-kube-api-access-5z7tc\") pod \"nmstate-console-plugin-7fbb5f6569-rfj5d\" (UID: \"cb5b59ce-56be-4b48-902a-902dc9e7a707\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-rfj5d" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.358377 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/263f185a-e858-45d0-a61c-453056d0a98c-dbus-socket\") pod \"nmstate-handler-ll4ls\" (UID: \"263f185a-e858-45d0-a61c-453056d0a98c\") " pod="openshift-nmstate/nmstate-handler-ll4ls" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.358417 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/208539d3-2c0f-4889-9239-c3dddd20ad3b-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-889j7\" (UID: \"208539d3-2c0f-4889-9239-c3dddd20ad3b\") " 
pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-889j7" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.358445 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/263f185a-e858-45d0-a61c-453056d0a98c-ovs-socket\") pod \"nmstate-handler-ll4ls\" (UID: \"263f185a-e858-45d0-a61c-453056d0a98c\") " pod="openshift-nmstate/nmstate-handler-ll4ls" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.358463 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/263f185a-e858-45d0-a61c-453056d0a98c-nmstate-lock\") pod \"nmstate-handler-ll4ls\" (UID: \"263f185a-e858-45d0-a61c-453056d0a98c\") " pod="openshift-nmstate/nmstate-handler-ll4ls" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.358575 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/263f185a-e858-45d0-a61c-453056d0a98c-ovs-socket\") pod \"nmstate-handler-ll4ls\" (UID: \"263f185a-e858-45d0-a61c-453056d0a98c\") " pod="openshift-nmstate/nmstate-handler-ll4ls" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.358739 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/263f185a-e858-45d0-a61c-453056d0a98c-dbus-socket\") pod \"nmstate-handler-ll4ls\" (UID: \"263f185a-e858-45d0-a61c-453056d0a98c\") " pod="openshift-nmstate/nmstate-handler-ll4ls" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.359018 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/cb5b59ce-56be-4b48-902a-902dc9e7a707-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-rfj5d\" (UID: \"cb5b59ce-56be-4b48-902a-902dc9e7a707\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-rfj5d" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.359129 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pqmhr\" (UniqueName: \"kubernetes.io/projected/263f185a-e858-45d0-a61c-453056d0a98c-kube-api-access-pqmhr\") pod \"nmstate-handler-ll4ls\" (UID: \"263f185a-e858-45d0-a61c-453056d0a98c\") " pod="openshift-nmstate/nmstate-handler-ll4ls" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.359201 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/cb5b59ce-56be-4b48-902a-902dc9e7a707-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-rfj5d\" (UID: \"cb5b59ce-56be-4b48-902a-902dc9e7a707\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-rfj5d" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.378286 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/208539d3-2c0f-4889-9239-c3dddd20ad3b-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-889j7\" (UID: \"208539d3-2c0f-4889-9239-c3dddd20ad3b\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-889j7" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.383973 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8m8dk\" (UniqueName: \"kubernetes.io/projected/208539d3-2c0f-4889-9239-c3dddd20ad3b-kube-api-access-8m8dk\") pod \"nmstate-webhook-5f6d4c5ccb-889j7\" (UID: \"208539d3-2c0f-4889-9239-c3dddd20ad3b\") " 
pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-889j7" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.397227 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pqmhr\" (UniqueName: \"kubernetes.io/projected/263f185a-e858-45d0-a61c-453056d0a98c-kube-api-access-pqmhr\") pod \"nmstate-handler-ll4ls\" (UID: \"263f185a-e858-45d0-a61c-453056d0a98c\") " pod="openshift-nmstate/nmstate-handler-ll4ls" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.417217 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-h2g5z" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.441830 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-889j7" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.460423 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-ll4ls" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.460574 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/cb5b59ce-56be-4b48-902a-902dc9e7a707-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-rfj5d\" (UID: \"cb5b59ce-56be-4b48-902a-902dc9e7a707\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-rfj5d" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.460649 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/cb5b59ce-56be-4b48-902a-902dc9e7a707-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-rfj5d\" (UID: \"cb5b59ce-56be-4b48-902a-902dc9e7a707\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-rfj5d" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.460715 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5z7tc\" (UniqueName: \"kubernetes.io/projected/cb5b59ce-56be-4b48-902a-902dc9e7a707-kube-api-access-5z7tc\") pod \"nmstate-console-plugin-7fbb5f6569-rfj5d\" (UID: \"cb5b59ce-56be-4b48-902a-902dc9e7a707\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-rfj5d" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.462409 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/cb5b59ce-56be-4b48-902a-902dc9e7a707-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-rfj5d\" (UID: \"cb5b59ce-56be-4b48-902a-902dc9e7a707\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-rfj5d" Dec 03 09:04:30 crc kubenswrapper[4576]: E1203 09:04:30.464304 4576 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found Dec 03 09:04:30 crc kubenswrapper[4576]: E1203 09:04:30.466669 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cb5b59ce-56be-4b48-902a-902dc9e7a707-plugin-serving-cert podName:cb5b59ce-56be-4b48-902a-902dc9e7a707 nodeName:}" failed. No retries permitted until 2025-12-03 09:04:30.964452102 +0000 UTC m=+1478.350429096 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/cb5b59ce-56be-4b48-902a-902dc9e7a707-plugin-serving-cert") pod "nmstate-console-plugin-7fbb5f6569-rfj5d" (UID: "cb5b59ce-56be-4b48-902a-902dc9e7a707") : secret "plugin-serving-cert" not found Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.497909 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5z7tc\" (UniqueName: \"kubernetes.io/projected/cb5b59ce-56be-4b48-902a-902dc9e7a707-kube-api-access-5z7tc\") pod \"nmstate-console-plugin-7fbb5f6569-rfj5d\" (UID: \"cb5b59ce-56be-4b48-902a-902dc9e7a707\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-rfj5d" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.542985 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-676685f9d5-cwlx9"] Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.543915 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-676685f9d5-cwlx9" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.558486 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-676685f9d5-cwlx9"] Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.664778 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/22abdb86-c7d9-4525-89c8-3a7603cf6f2c-console-config\") pod \"console-676685f9d5-cwlx9\" (UID: \"22abdb86-c7d9-4525-89c8-3a7603cf6f2c\") " pod="openshift-console/console-676685f9d5-cwlx9" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.664836 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/22abdb86-c7d9-4525-89c8-3a7603cf6f2c-console-oauth-config\") pod \"console-676685f9d5-cwlx9\" (UID: \"22abdb86-c7d9-4525-89c8-3a7603cf6f2c\") " pod="openshift-console/console-676685f9d5-cwlx9" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.664862 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ckxnb\" (UniqueName: \"kubernetes.io/projected/22abdb86-c7d9-4525-89c8-3a7603cf6f2c-kube-api-access-ckxnb\") pod \"console-676685f9d5-cwlx9\" (UID: \"22abdb86-c7d9-4525-89c8-3a7603cf6f2c\") " pod="openshift-console/console-676685f9d5-cwlx9" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.664908 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/22abdb86-c7d9-4525-89c8-3a7603cf6f2c-trusted-ca-bundle\") pod \"console-676685f9d5-cwlx9\" (UID: \"22abdb86-c7d9-4525-89c8-3a7603cf6f2c\") " pod="openshift-console/console-676685f9d5-cwlx9" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.664966 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/22abdb86-c7d9-4525-89c8-3a7603cf6f2c-oauth-serving-cert\") pod \"console-676685f9d5-cwlx9\" (UID: \"22abdb86-c7d9-4525-89c8-3a7603cf6f2c\") " pod="openshift-console/console-676685f9d5-cwlx9" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.665061 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: 
\"kubernetes.io/configmap/22abdb86-c7d9-4525-89c8-3a7603cf6f2c-service-ca\") pod \"console-676685f9d5-cwlx9\" (UID: \"22abdb86-c7d9-4525-89c8-3a7603cf6f2c\") " pod="openshift-console/console-676685f9d5-cwlx9" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.665086 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/22abdb86-c7d9-4525-89c8-3a7603cf6f2c-console-serving-cert\") pod \"console-676685f9d5-cwlx9\" (UID: \"22abdb86-c7d9-4525-89c8-3a7603cf6f2c\") " pod="openshift-console/console-676685f9d5-cwlx9" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.766827 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/22abdb86-c7d9-4525-89c8-3a7603cf6f2c-service-ca\") pod \"console-676685f9d5-cwlx9\" (UID: \"22abdb86-c7d9-4525-89c8-3a7603cf6f2c\") " pod="openshift-console/console-676685f9d5-cwlx9" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.766890 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/22abdb86-c7d9-4525-89c8-3a7603cf6f2c-console-serving-cert\") pod \"console-676685f9d5-cwlx9\" (UID: \"22abdb86-c7d9-4525-89c8-3a7603cf6f2c\") " pod="openshift-console/console-676685f9d5-cwlx9" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.766911 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/22abdb86-c7d9-4525-89c8-3a7603cf6f2c-console-config\") pod \"console-676685f9d5-cwlx9\" (UID: \"22abdb86-c7d9-4525-89c8-3a7603cf6f2c\") " pod="openshift-console/console-676685f9d5-cwlx9" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.766928 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/22abdb86-c7d9-4525-89c8-3a7603cf6f2c-console-oauth-config\") pod \"console-676685f9d5-cwlx9\" (UID: \"22abdb86-c7d9-4525-89c8-3a7603cf6f2c\") " pod="openshift-console/console-676685f9d5-cwlx9" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.766943 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ckxnb\" (UniqueName: \"kubernetes.io/projected/22abdb86-c7d9-4525-89c8-3a7603cf6f2c-kube-api-access-ckxnb\") pod \"console-676685f9d5-cwlx9\" (UID: \"22abdb86-c7d9-4525-89c8-3a7603cf6f2c\") " pod="openshift-console/console-676685f9d5-cwlx9" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.766963 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/22abdb86-c7d9-4525-89c8-3a7603cf6f2c-trusted-ca-bundle\") pod \"console-676685f9d5-cwlx9\" (UID: \"22abdb86-c7d9-4525-89c8-3a7603cf6f2c\") " pod="openshift-console/console-676685f9d5-cwlx9" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.767033 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/22abdb86-c7d9-4525-89c8-3a7603cf6f2c-oauth-serving-cert\") pod \"console-676685f9d5-cwlx9\" (UID: \"22abdb86-c7d9-4525-89c8-3a7603cf6f2c\") " pod="openshift-console/console-676685f9d5-cwlx9" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.768156 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" 
(UniqueName: \"kubernetes.io/configmap/22abdb86-c7d9-4525-89c8-3a7603cf6f2c-oauth-serving-cert\") pod \"console-676685f9d5-cwlx9\" (UID: \"22abdb86-c7d9-4525-89c8-3a7603cf6f2c\") " pod="openshift-console/console-676685f9d5-cwlx9" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.768173 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/22abdb86-c7d9-4525-89c8-3a7603cf6f2c-service-ca\") pod \"console-676685f9d5-cwlx9\" (UID: \"22abdb86-c7d9-4525-89c8-3a7603cf6f2c\") " pod="openshift-console/console-676685f9d5-cwlx9" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.769457 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/22abdb86-c7d9-4525-89c8-3a7603cf6f2c-console-config\") pod \"console-676685f9d5-cwlx9\" (UID: \"22abdb86-c7d9-4525-89c8-3a7603cf6f2c\") " pod="openshift-console/console-676685f9d5-cwlx9" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.773808 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/22abdb86-c7d9-4525-89c8-3a7603cf6f2c-trusted-ca-bundle\") pod \"console-676685f9d5-cwlx9\" (UID: \"22abdb86-c7d9-4525-89c8-3a7603cf6f2c\") " pod="openshift-console/console-676685f9d5-cwlx9" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.784186 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/22abdb86-c7d9-4525-89c8-3a7603cf6f2c-console-oauth-config\") pod \"console-676685f9d5-cwlx9\" (UID: \"22abdb86-c7d9-4525-89c8-3a7603cf6f2c\") " pod="openshift-console/console-676685f9d5-cwlx9" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.793344 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/22abdb86-c7d9-4525-89c8-3a7603cf6f2c-console-serving-cert\") pod \"console-676685f9d5-cwlx9\" (UID: \"22abdb86-c7d9-4525-89c8-3a7603cf6f2c\") " pod="openshift-console/console-676685f9d5-cwlx9" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.795094 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-h2g5z"] Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.798360 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ckxnb\" (UniqueName: \"kubernetes.io/projected/22abdb86-c7d9-4525-89c8-3a7603cf6f2c-kube-api-access-ckxnb\") pod \"console-676685f9d5-cwlx9\" (UID: \"22abdb86-c7d9-4525-89c8-3a7603cf6f2c\") " pod="openshift-console/console-676685f9d5-cwlx9" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.854030 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-889j7"] Dec 03 09:04:30 crc kubenswrapper[4576]: W1203 09:04:30.866203 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod208539d3_2c0f_4889_9239_c3dddd20ad3b.slice/crio-e91e1686a7c3156ba4b40aae2eb63f44d4b80287f6c38e9eb43a4424a4f47c65 WatchSource:0}: Error finding container e91e1686a7c3156ba4b40aae2eb63f44d4b80287f6c38e9eb43a4424a4f47c65: Status 404 returned error can't find the container with id e91e1686a7c3156ba4b40aae2eb63f44d4b80287f6c38e9eb43a4424a4f47c65 Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.876902 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-676685f9d5-cwlx9" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.969213 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/cb5b59ce-56be-4b48-902a-902dc9e7a707-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-rfj5d\" (UID: \"cb5b59ce-56be-4b48-902a-902dc9e7a707\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-rfj5d" Dec 03 09:04:30 crc kubenswrapper[4576]: I1203 09:04:30.972877 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/cb5b59ce-56be-4b48-902a-902dc9e7a707-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-rfj5d\" (UID: \"cb5b59ce-56be-4b48-902a-902dc9e7a707\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-rfj5d" Dec 03 09:04:31 crc kubenswrapper[4576]: I1203 09:04:31.015044 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-ll4ls" event={"ID":"263f185a-e858-45d0-a61c-453056d0a98c","Type":"ContainerStarted","Data":"5c56033bb8215c8e23c748007c4911383b7ff559bd38d1a564489e85e3b44025"} Dec 03 09:04:31 crc kubenswrapper[4576]: I1203 09:04:31.015918 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-h2g5z" event={"ID":"4811e0da-b69c-436c-a2f9-1796a35b69ee","Type":"ContainerStarted","Data":"0276ea36a09641d53ef4742aa77f0886437bb228db2d843db44218d421634b41"} Dec 03 09:04:31 crc kubenswrapper[4576]: I1203 09:04:31.016804 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-889j7" event={"ID":"208539d3-2c0f-4889-9239-c3dddd20ad3b","Type":"ContainerStarted","Data":"e91e1686a7c3156ba4b40aae2eb63f44d4b80287f6c38e9eb43a4424a4f47c65"} Dec 03 09:04:31 crc kubenswrapper[4576]: I1203 09:04:31.059674 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-676685f9d5-cwlx9"] Dec 03 09:04:31 crc kubenswrapper[4576]: W1203 09:04:31.063297 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod22abdb86_c7d9_4525_89c8_3a7603cf6f2c.slice/crio-67357db2a12425bd1e7bbf05ac1be97c49943ceb67407cb601cfb8d230b309aa WatchSource:0}: Error finding container 67357db2a12425bd1e7bbf05ac1be97c49943ceb67407cb601cfb8d230b309aa: Status 404 returned error can't find the container with id 67357db2a12425bd1e7bbf05ac1be97c49943ceb67407cb601cfb8d230b309aa Dec 03 09:04:31 crc kubenswrapper[4576]: I1203 09:04:31.187686 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-rfj5d" Dec 03 09:04:31 crc kubenswrapper[4576]: I1203 09:04:31.582182 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-rfj5d"] Dec 03 09:04:31 crc kubenswrapper[4576]: W1203 09:04:31.590964 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcb5b59ce_56be_4b48_902a_902dc9e7a707.slice/crio-a3c49ef2bac7b9b02626e3e11acd3bf98a4b9c6c425eed39667e789a73b8d483 WatchSource:0}: Error finding container a3c49ef2bac7b9b02626e3e11acd3bf98a4b9c6c425eed39667e789a73b8d483: Status 404 returned error can't find the container with id a3c49ef2bac7b9b02626e3e11acd3bf98a4b9c6c425eed39667e789a73b8d483 Dec 03 09:04:32 crc kubenswrapper[4576]: I1203 09:04:32.023285 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-rfj5d" event={"ID":"cb5b59ce-56be-4b48-902a-902dc9e7a707","Type":"ContainerStarted","Data":"a3c49ef2bac7b9b02626e3e11acd3bf98a4b9c6c425eed39667e789a73b8d483"} Dec 03 09:04:32 crc kubenswrapper[4576]: I1203 09:04:32.024721 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-676685f9d5-cwlx9" event={"ID":"22abdb86-c7d9-4525-89c8-3a7603cf6f2c","Type":"ContainerStarted","Data":"6715b8a31428cf414de2057babd55af1ed40897f7f0077e6eb52b5d01cac6f18"} Dec 03 09:04:32 crc kubenswrapper[4576]: I1203 09:04:32.025893 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-676685f9d5-cwlx9" event={"ID":"22abdb86-c7d9-4525-89c8-3a7603cf6f2c","Type":"ContainerStarted","Data":"67357db2a12425bd1e7bbf05ac1be97c49943ceb67407cb601cfb8d230b309aa"} Dec 03 09:04:32 crc kubenswrapper[4576]: I1203 09:04:32.051026 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-676685f9d5-cwlx9" podStartSLOduration=2.05098451 podStartE2EDuration="2.05098451s" podCreationTimestamp="2025-12-03 09:04:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:04:32.041986146 +0000 UTC m=+1479.427963130" watchObservedRunningTime="2025-12-03 09:04:32.05098451 +0000 UTC m=+1479.436961544" Dec 03 09:04:35 crc kubenswrapper[4576]: I1203 09:04:35.046639 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-889j7" event={"ID":"208539d3-2c0f-4889-9239-c3dddd20ad3b","Type":"ContainerStarted","Data":"50fdf6ae07888b33ebc26700ba8167202ab133877c3e40347922e399762f37e4"} Dec 03 09:04:35 crc kubenswrapper[4576]: I1203 09:04:35.047180 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-889j7" Dec 03 09:04:35 crc kubenswrapper[4576]: I1203 09:04:35.049878 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-ll4ls" event={"ID":"263f185a-e858-45d0-a61c-453056d0a98c","Type":"ContainerStarted","Data":"696ebdceea0beaa53e548cb9530c9a29544b51547f07820f5965c1a632a546f0"} Dec 03 09:04:35 crc kubenswrapper[4576]: I1203 09:04:35.049984 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-ll4ls" Dec 03 09:04:35 crc kubenswrapper[4576]: I1203 09:04:35.052213 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-h2g5z" 
event={"ID":"4811e0da-b69c-436c-a2f9-1796a35b69ee","Type":"ContainerStarted","Data":"ae7199338839dd8a811e09017041c81dac2d6ec84572435f67872d9a24ae76ec"} Dec 03 09:04:35 crc kubenswrapper[4576]: I1203 09:04:35.075081 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-889j7" podStartSLOduration=1.968790915 podStartE2EDuration="5.075057826s" podCreationTimestamp="2025-12-03 09:04:30 +0000 UTC" firstStartedPulling="2025-12-03 09:04:30.868778511 +0000 UTC m=+1478.254755495" lastFinishedPulling="2025-12-03 09:04:33.975045422 +0000 UTC m=+1481.361022406" observedRunningTime="2025-12-03 09:04:35.063489123 +0000 UTC m=+1482.449466107" watchObservedRunningTime="2025-12-03 09:04:35.075057826 +0000 UTC m=+1482.461034810" Dec 03 09:04:36 crc kubenswrapper[4576]: I1203 09:04:36.063360 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-rfj5d" event={"ID":"cb5b59ce-56be-4b48-902a-902dc9e7a707","Type":"ContainerStarted","Data":"02206d7ac1096a1dbd16ae7edad87a8e5becdce31fee352b9c8fdc0b14dcfc9b"} Dec 03 09:04:36 crc kubenswrapper[4576]: I1203 09:04:36.093645 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-ll4ls" podStartSLOduration=2.610767976 podStartE2EDuration="6.093619776s" podCreationTimestamp="2025-12-03 09:04:30 +0000 UTC" firstStartedPulling="2025-12-03 09:04:30.546584124 +0000 UTC m=+1477.932561108" lastFinishedPulling="2025-12-03 09:04:34.029435924 +0000 UTC m=+1481.415412908" observedRunningTime="2025-12-03 09:04:35.121806651 +0000 UTC m=+1482.507783635" watchObservedRunningTime="2025-12-03 09:04:36.093619776 +0000 UTC m=+1483.479596760" Dec 03 09:04:36 crc kubenswrapper[4576]: I1203 09:04:36.094006 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-rfj5d" podStartSLOduration=2.562398297 podStartE2EDuration="6.094000216s" podCreationTimestamp="2025-12-03 09:04:30 +0000 UTC" firstStartedPulling="2025-12-03 09:04:31.593421969 +0000 UTC m=+1478.979398953" lastFinishedPulling="2025-12-03 09:04:35.125023888 +0000 UTC m=+1482.511000872" observedRunningTime="2025-12-03 09:04:36.088911259 +0000 UTC m=+1483.474888243" watchObservedRunningTime="2025-12-03 09:04:36.094000216 +0000 UTC m=+1483.479977200" Dec 03 09:04:37 crc kubenswrapper[4576]: I1203 09:04:37.071141 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-h2g5z" event={"ID":"4811e0da-b69c-436c-a2f9-1796a35b69ee","Type":"ContainerStarted","Data":"15fa225408d7cf6d29a0e4074a646f2748591e73de2cfc4fd829a882c795e66d"} Dec 03 09:04:40 crc kubenswrapper[4576]: I1203 09:04:40.485634 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-ll4ls" Dec 03 09:04:40 crc kubenswrapper[4576]: I1203 09:04:40.509241 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-h2g5z" podStartSLOduration=4.766495337 podStartE2EDuration="10.509213415s" podCreationTimestamp="2025-12-03 09:04:30 +0000 UTC" firstStartedPulling="2025-12-03 09:04:30.808030178 +0000 UTC m=+1478.194007162" lastFinishedPulling="2025-12-03 09:04:36.550748256 +0000 UTC m=+1483.936725240" observedRunningTime="2025-12-03 09:04:37.100772448 +0000 UTC m=+1484.486749482" watchObservedRunningTime="2025-12-03 09:04:40.509213415 +0000 UTC m=+1487.895190399" Dec 03 
09:04:40 crc kubenswrapper[4576]: I1203 09:04:40.877989 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-676685f9d5-cwlx9" Dec 03 09:04:40 crc kubenswrapper[4576]: I1203 09:04:40.878044 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-676685f9d5-cwlx9" Dec 03 09:04:40 crc kubenswrapper[4576]: I1203 09:04:40.883799 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-676685f9d5-cwlx9" Dec 03 09:04:41 crc kubenswrapper[4576]: I1203 09:04:41.106118 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-676685f9d5-cwlx9" Dec 03 09:04:41 crc kubenswrapper[4576]: I1203 09:04:41.173245 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-h7ncw"] Dec 03 09:04:50 crc kubenswrapper[4576]: I1203 09:04:50.449838 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-889j7" Dec 03 09:05:04 crc kubenswrapper[4576]: I1203 09:05:04.510231 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-nqv7l"] Dec 03 09:05:04 crc kubenswrapper[4576]: I1203 09:05:04.512191 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nqv7l" Dec 03 09:05:04 crc kubenswrapper[4576]: I1203 09:05:04.541414 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nqv7l"] Dec 03 09:05:04 crc kubenswrapper[4576]: I1203 09:05:04.665285 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0935bbc-4f84-4c7d-9372-df42c84833bb-utilities\") pod \"redhat-marketplace-nqv7l\" (UID: \"e0935bbc-4f84-4c7d-9372-df42c84833bb\") " pod="openshift-marketplace/redhat-marketplace-nqv7l" Dec 03 09:05:04 crc kubenswrapper[4576]: I1203 09:05:04.665355 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0935bbc-4f84-4c7d-9372-df42c84833bb-catalog-content\") pod \"redhat-marketplace-nqv7l\" (UID: \"e0935bbc-4f84-4c7d-9372-df42c84833bb\") " pod="openshift-marketplace/redhat-marketplace-nqv7l" Dec 03 09:05:04 crc kubenswrapper[4576]: I1203 09:05:04.665444 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99g7p\" (UniqueName: \"kubernetes.io/projected/e0935bbc-4f84-4c7d-9372-df42c84833bb-kube-api-access-99g7p\") pod \"redhat-marketplace-nqv7l\" (UID: \"e0935bbc-4f84-4c7d-9372-df42c84833bb\") " pod="openshift-marketplace/redhat-marketplace-nqv7l" Dec 03 09:05:04 crc kubenswrapper[4576]: I1203 09:05:04.766321 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0935bbc-4f84-4c7d-9372-df42c84833bb-utilities\") pod \"redhat-marketplace-nqv7l\" (UID: \"e0935bbc-4f84-4c7d-9372-df42c84833bb\") " pod="openshift-marketplace/redhat-marketplace-nqv7l" Dec 03 09:05:04 crc kubenswrapper[4576]: I1203 09:05:04.766904 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0935bbc-4f84-4c7d-9372-df42c84833bb-catalog-content\") pod \"redhat-marketplace-nqv7l\" (UID: 
\"e0935bbc-4f84-4c7d-9372-df42c84833bb\") " pod="openshift-marketplace/redhat-marketplace-nqv7l" Dec 03 09:05:04 crc kubenswrapper[4576]: I1203 09:05:04.767131 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0935bbc-4f84-4c7d-9372-df42c84833bb-utilities\") pod \"redhat-marketplace-nqv7l\" (UID: \"e0935bbc-4f84-4c7d-9372-df42c84833bb\") " pod="openshift-marketplace/redhat-marketplace-nqv7l" Dec 03 09:05:04 crc kubenswrapper[4576]: I1203 09:05:04.767332 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0935bbc-4f84-4c7d-9372-df42c84833bb-catalog-content\") pod \"redhat-marketplace-nqv7l\" (UID: \"e0935bbc-4f84-4c7d-9372-df42c84833bb\") " pod="openshift-marketplace/redhat-marketplace-nqv7l" Dec 03 09:05:04 crc kubenswrapper[4576]: I1203 09:05:04.767723 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-99g7p\" (UniqueName: \"kubernetes.io/projected/e0935bbc-4f84-4c7d-9372-df42c84833bb-kube-api-access-99g7p\") pod \"redhat-marketplace-nqv7l\" (UID: \"e0935bbc-4f84-4c7d-9372-df42c84833bb\") " pod="openshift-marketplace/redhat-marketplace-nqv7l" Dec 03 09:05:04 crc kubenswrapper[4576]: I1203 09:05:04.800775 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-99g7p\" (UniqueName: \"kubernetes.io/projected/e0935bbc-4f84-4c7d-9372-df42c84833bb-kube-api-access-99g7p\") pod \"redhat-marketplace-nqv7l\" (UID: \"e0935bbc-4f84-4c7d-9372-df42c84833bb\") " pod="openshift-marketplace/redhat-marketplace-nqv7l" Dec 03 09:05:04 crc kubenswrapper[4576]: I1203 09:05:04.831438 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nqv7l" Dec 03 09:05:05 crc kubenswrapper[4576]: I1203 09:05:05.560062 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nqv7l"] Dec 03 09:05:06 crc kubenswrapper[4576]: I1203 09:05:06.115230 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc"] Dec 03 09:05:06 crc kubenswrapper[4576]: I1203 09:05:06.118543 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc" Dec 03 09:05:06 crc kubenswrapper[4576]: I1203 09:05:06.129232 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 03 09:05:06 crc kubenswrapper[4576]: I1203 09:05:06.154343 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc"] Dec 03 09:05:06 crc kubenswrapper[4576]: I1203 09:05:06.190880 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/059fa16e-ef61-475d-927a-8b6fe7ed5c81-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc\" (UID: \"059fa16e-ef61-475d-927a-8b6fe7ed5c81\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc" Dec 03 09:05:06 crc kubenswrapper[4576]: I1203 09:05:06.190947 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/059fa16e-ef61-475d-927a-8b6fe7ed5c81-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc\" (UID: \"059fa16e-ef61-475d-927a-8b6fe7ed5c81\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc" Dec 03 09:05:06 crc kubenswrapper[4576]: I1203 09:05:06.191090 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rhwlr\" (UniqueName: \"kubernetes.io/projected/059fa16e-ef61-475d-927a-8b6fe7ed5c81-kube-api-access-rhwlr\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc\" (UID: \"059fa16e-ef61-475d-927a-8b6fe7ed5c81\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc" Dec 03 09:05:06 crc kubenswrapper[4576]: I1203 09:05:06.236240 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-h7ncw" podUID="3cb91673-2622-41a5-91d3-a79e0ba7289b" containerName="console" containerID="cri-o://2245521e156360c88093e0e4899ab1ee8120afe1cdc928a6d30805ef3607d9fc" gracePeriod=15 Dec 03 09:05:06 crc kubenswrapper[4576]: I1203 09:05:06.283227 4576 generic.go:334] "Generic (PLEG): container finished" podID="e0935bbc-4f84-4c7d-9372-df42c84833bb" containerID="c5960a1fac38abd5a7b596d44a6a549fc135d1ee73785bc0e1bef8d2ea480920" exitCode=0 Dec 03 09:05:06 crc kubenswrapper[4576]: I1203 09:05:06.283354 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nqv7l" event={"ID":"e0935bbc-4f84-4c7d-9372-df42c84833bb","Type":"ContainerDied","Data":"c5960a1fac38abd5a7b596d44a6a549fc135d1ee73785bc0e1bef8d2ea480920"} Dec 03 09:05:06 crc kubenswrapper[4576]: I1203 09:05:06.283471 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nqv7l" event={"ID":"e0935bbc-4f84-4c7d-9372-df42c84833bb","Type":"ContainerStarted","Data":"215819c7ff0d77aea9dc471855d676919af92737c25e604936c2d3ddb52e11db"} Dec 03 09:05:06 crc kubenswrapper[4576]: I1203 09:05:06.292349 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/059fa16e-ef61-475d-927a-8b6fe7ed5c81-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc\" (UID: 
\"059fa16e-ef61-475d-927a-8b6fe7ed5c81\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc" Dec 03 09:05:06 crc kubenswrapper[4576]: I1203 09:05:06.292502 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/059fa16e-ef61-475d-927a-8b6fe7ed5c81-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc\" (UID: \"059fa16e-ef61-475d-927a-8b6fe7ed5c81\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc" Dec 03 09:05:06 crc kubenswrapper[4576]: I1203 09:05:06.292669 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rhwlr\" (UniqueName: \"kubernetes.io/projected/059fa16e-ef61-475d-927a-8b6fe7ed5c81-kube-api-access-rhwlr\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc\" (UID: \"059fa16e-ef61-475d-927a-8b6fe7ed5c81\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc" Dec 03 09:05:06 crc kubenswrapper[4576]: I1203 09:05:06.293206 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/059fa16e-ef61-475d-927a-8b6fe7ed5c81-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc\" (UID: \"059fa16e-ef61-475d-927a-8b6fe7ed5c81\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc" Dec 03 09:05:06 crc kubenswrapper[4576]: I1203 09:05:06.293987 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/059fa16e-ef61-475d-927a-8b6fe7ed5c81-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc\" (UID: \"059fa16e-ef61-475d-927a-8b6fe7ed5c81\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc" Dec 03 09:05:06 crc kubenswrapper[4576]: I1203 09:05:06.319831 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rhwlr\" (UniqueName: \"kubernetes.io/projected/059fa16e-ef61-475d-927a-8b6fe7ed5c81-kube-api-access-rhwlr\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc\" (UID: \"059fa16e-ef61-475d-927a-8b6fe7ed5c81\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc" Dec 03 09:05:06 crc kubenswrapper[4576]: I1203 09:05:06.444902 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc" Dec 03 09:05:06 crc kubenswrapper[4576]: I1203 09:05:06.617397 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-h7ncw_3cb91673-2622-41a5-91d3-a79e0ba7289b/console/0.log" Dec 03 09:05:06 crc kubenswrapper[4576]: I1203 09:05:06.617518 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-h7ncw" Dec 03 09:05:06 crc kubenswrapper[4576]: I1203 09:05:06.704460 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc"] Dec 03 09:05:06 crc kubenswrapper[4576]: I1203 09:05:06.707034 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3cb91673-2622-41a5-91d3-a79e0ba7289b-service-ca\") pod \"3cb91673-2622-41a5-91d3-a79e0ba7289b\" (UID: \"3cb91673-2622-41a5-91d3-a79e0ba7289b\") " Dec 03 09:05:06 crc kubenswrapper[4576]: I1203 09:05:06.707116 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3cb91673-2622-41a5-91d3-a79e0ba7289b-console-oauth-config\") pod \"3cb91673-2622-41a5-91d3-a79e0ba7289b\" (UID: \"3cb91673-2622-41a5-91d3-a79e0ba7289b\") " Dec 03 09:05:06 crc kubenswrapper[4576]: I1203 09:05:06.707156 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3cb91673-2622-41a5-91d3-a79e0ba7289b-console-serving-cert\") pod \"3cb91673-2622-41a5-91d3-a79e0ba7289b\" (UID: \"3cb91673-2622-41a5-91d3-a79e0ba7289b\") " Dec 03 09:05:06 crc kubenswrapper[4576]: I1203 09:05:06.707197 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mzxps\" (UniqueName: \"kubernetes.io/projected/3cb91673-2622-41a5-91d3-a79e0ba7289b-kube-api-access-mzxps\") pod \"3cb91673-2622-41a5-91d3-a79e0ba7289b\" (UID: \"3cb91673-2622-41a5-91d3-a79e0ba7289b\") " Dec 03 09:05:06 crc kubenswrapper[4576]: I1203 09:05:06.707216 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3cb91673-2622-41a5-91d3-a79e0ba7289b-trusted-ca-bundle\") pod \"3cb91673-2622-41a5-91d3-a79e0ba7289b\" (UID: \"3cb91673-2622-41a5-91d3-a79e0ba7289b\") " Dec 03 09:05:06 crc kubenswrapper[4576]: I1203 09:05:06.707283 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3cb91673-2622-41a5-91d3-a79e0ba7289b-console-config\") pod \"3cb91673-2622-41a5-91d3-a79e0ba7289b\" (UID: \"3cb91673-2622-41a5-91d3-a79e0ba7289b\") " Dec 03 09:05:06 crc kubenswrapper[4576]: I1203 09:05:06.707297 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3cb91673-2622-41a5-91d3-a79e0ba7289b-oauth-serving-cert\") pod \"3cb91673-2622-41a5-91d3-a79e0ba7289b\" (UID: \"3cb91673-2622-41a5-91d3-a79e0ba7289b\") " Dec 03 09:05:06 crc kubenswrapper[4576]: I1203 09:05:06.708481 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb91673-2622-41a5-91d3-a79e0ba7289b-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "3cb91673-2622-41a5-91d3-a79e0ba7289b" (UID: "3cb91673-2622-41a5-91d3-a79e0ba7289b"). InnerVolumeSpecName "oauth-serving-cert". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:05:06 crc kubenswrapper[4576]: I1203 09:05:06.709204 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb91673-2622-41a5-91d3-a79e0ba7289b-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "3cb91673-2622-41a5-91d3-a79e0ba7289b" (UID: "3cb91673-2622-41a5-91d3-a79e0ba7289b"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:05:06 crc kubenswrapper[4576]: I1203 09:05:06.714468 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3cb91673-2622-41a5-91d3-a79e0ba7289b-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "3cb91673-2622-41a5-91d3-a79e0ba7289b" (UID: "3cb91673-2622-41a5-91d3-a79e0ba7289b"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:05:06 crc kubenswrapper[4576]: I1203 09:05:06.714793 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3cb91673-2622-41a5-91d3-a79e0ba7289b-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "3cb91673-2622-41a5-91d3-a79e0ba7289b" (UID: "3cb91673-2622-41a5-91d3-a79e0ba7289b"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:05:06 crc kubenswrapper[4576]: I1203 09:05:06.718764 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb91673-2622-41a5-91d3-a79e0ba7289b-service-ca" (OuterVolumeSpecName: "service-ca") pod "3cb91673-2622-41a5-91d3-a79e0ba7289b" (UID: "3cb91673-2622-41a5-91d3-a79e0ba7289b"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:05:06 crc kubenswrapper[4576]: I1203 09:05:06.719100 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb91673-2622-41a5-91d3-a79e0ba7289b-kube-api-access-mzxps" (OuterVolumeSpecName: "kube-api-access-mzxps") pod "3cb91673-2622-41a5-91d3-a79e0ba7289b" (UID: "3cb91673-2622-41a5-91d3-a79e0ba7289b"). InnerVolumeSpecName "kube-api-access-mzxps". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:05:06 crc kubenswrapper[4576]: I1203 09:05:06.719742 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb91673-2622-41a5-91d3-a79e0ba7289b-console-config" (OuterVolumeSpecName: "console-config") pod "3cb91673-2622-41a5-91d3-a79e0ba7289b" (UID: "3cb91673-2622-41a5-91d3-a79e0ba7289b"). InnerVolumeSpecName "console-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:05:06 crc kubenswrapper[4576]: I1203 09:05:06.809227 4576 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3cb91673-2622-41a5-91d3-a79e0ba7289b-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 09:05:06 crc kubenswrapper[4576]: I1203 09:05:06.809301 4576 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3cb91673-2622-41a5-91d3-a79e0ba7289b-console-config\") on node \"crc\" DevicePath \"\"" Dec 03 09:05:06 crc kubenswrapper[4576]: I1203 09:05:06.809314 4576 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3cb91673-2622-41a5-91d3-a79e0ba7289b-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 09:05:06 crc kubenswrapper[4576]: I1203 09:05:06.809327 4576 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3cb91673-2622-41a5-91d3-a79e0ba7289b-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 03 09:05:06 crc kubenswrapper[4576]: I1203 09:05:06.809340 4576 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3cb91673-2622-41a5-91d3-a79e0ba7289b-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 09:05:06 crc kubenswrapper[4576]: I1203 09:05:06.809354 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mzxps\" (UniqueName: \"kubernetes.io/projected/3cb91673-2622-41a5-91d3-a79e0ba7289b-kube-api-access-mzxps\") on node \"crc\" DevicePath \"\"" Dec 03 09:05:06 crc kubenswrapper[4576]: I1203 09:05:06.809366 4576 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3cb91673-2622-41a5-91d3-a79e0ba7289b-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:05:07 crc kubenswrapper[4576]: I1203 09:05:07.290574 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-h7ncw_3cb91673-2622-41a5-91d3-a79e0ba7289b/console/0.log" Dec 03 09:05:07 crc kubenswrapper[4576]: I1203 09:05:07.290621 4576 generic.go:334] "Generic (PLEG): container finished" podID="3cb91673-2622-41a5-91d3-a79e0ba7289b" containerID="2245521e156360c88093e0e4899ab1ee8120afe1cdc928a6d30805ef3607d9fc" exitCode=2 Dec 03 09:05:07 crc kubenswrapper[4576]: I1203 09:05:07.290675 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-h7ncw" event={"ID":"3cb91673-2622-41a5-91d3-a79e0ba7289b","Type":"ContainerDied","Data":"2245521e156360c88093e0e4899ab1ee8120afe1cdc928a6d30805ef3607d9fc"} Dec 03 09:05:07 crc kubenswrapper[4576]: I1203 09:05:07.290700 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-h7ncw" event={"ID":"3cb91673-2622-41a5-91d3-a79e0ba7289b","Type":"ContainerDied","Data":"75ae8f891e53c283ae62ae63b91c952fabfd4271f51910d59023376d2c709cc4"} Dec 03 09:05:07 crc kubenswrapper[4576]: I1203 09:05:07.290733 4576 scope.go:117] "RemoveContainer" containerID="2245521e156360c88093e0e4899ab1ee8120afe1cdc928a6d30805ef3607d9fc" Dec 03 09:05:07 crc kubenswrapper[4576]: I1203 09:05:07.290793 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-h7ncw" Dec 03 09:05:07 crc kubenswrapper[4576]: I1203 09:05:07.293654 4576 generic.go:334] "Generic (PLEG): container finished" podID="059fa16e-ef61-475d-927a-8b6fe7ed5c81" containerID="82aa1451ad763702d67c36fdf8fbe23c69a542ab3c233c27f0f801e3b76d102a" exitCode=0 Dec 03 09:05:07 crc kubenswrapper[4576]: I1203 09:05:07.293745 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc" event={"ID":"059fa16e-ef61-475d-927a-8b6fe7ed5c81","Type":"ContainerDied","Data":"82aa1451ad763702d67c36fdf8fbe23c69a542ab3c233c27f0f801e3b76d102a"} Dec 03 09:05:07 crc kubenswrapper[4576]: I1203 09:05:07.293793 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc" event={"ID":"059fa16e-ef61-475d-927a-8b6fe7ed5c81","Type":"ContainerStarted","Data":"638fd37c4d58e7b3cda37ef9f31cae84ba246d52f1942d4353b7df133f6bf15c"} Dec 03 09:05:07 crc kubenswrapper[4576]: I1203 09:05:07.299836 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nqv7l" event={"ID":"e0935bbc-4f84-4c7d-9372-df42c84833bb","Type":"ContainerStarted","Data":"cf183f545285dea5a7f4afb0f0de0490a212148b6f03abcb75b1af121c5153f7"} Dec 03 09:05:07 crc kubenswrapper[4576]: I1203 09:05:07.315732 4576 scope.go:117] "RemoveContainer" containerID="2245521e156360c88093e0e4899ab1ee8120afe1cdc928a6d30805ef3607d9fc" Dec 03 09:05:07 crc kubenswrapper[4576]: E1203 09:05:07.316946 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2245521e156360c88093e0e4899ab1ee8120afe1cdc928a6d30805ef3607d9fc\": container with ID starting with 2245521e156360c88093e0e4899ab1ee8120afe1cdc928a6d30805ef3607d9fc not found: ID does not exist" containerID="2245521e156360c88093e0e4899ab1ee8120afe1cdc928a6d30805ef3607d9fc" Dec 03 09:05:07 crc kubenswrapper[4576]: I1203 09:05:07.317054 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2245521e156360c88093e0e4899ab1ee8120afe1cdc928a6d30805ef3607d9fc"} err="failed to get container status \"2245521e156360c88093e0e4899ab1ee8120afe1cdc928a6d30805ef3607d9fc\": rpc error: code = NotFound desc = could not find container \"2245521e156360c88093e0e4899ab1ee8120afe1cdc928a6d30805ef3607d9fc\": container with ID starting with 2245521e156360c88093e0e4899ab1ee8120afe1cdc928a6d30805ef3607d9fc not found: ID does not exist" Dec 03 09:05:07 crc kubenswrapper[4576]: I1203 09:05:07.357899 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-h7ncw"] Dec 03 09:05:07 crc kubenswrapper[4576]: I1203 09:05:07.358640 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-h7ncw"] Dec 03 09:05:07 crc kubenswrapper[4576]: I1203 09:05:07.438770 4576 patch_prober.go:28] interesting pod/console-f9d7485db-h7ncw container/console namespace/openshift-console: Readiness probe status=failure output="Get \"https://10.217.0.20:8443/health\": dial tcp 10.217.0.20:8443: i/o timeout" start-of-body= Dec 03 09:05:07 crc kubenswrapper[4576]: I1203 09:05:07.438922 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/console-f9d7485db-h7ncw" podUID="3cb91673-2622-41a5-91d3-a79e0ba7289b" containerName="console" probeResult="failure" output="Get 
\"https://10.217.0.20:8443/health\": dial tcp 10.217.0.20:8443: i/o timeout" Dec 03 09:05:07 crc kubenswrapper[4576]: I1203 09:05:07.685336 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb91673-2622-41a5-91d3-a79e0ba7289b" path="/var/lib/kubelet/pods/3cb91673-2622-41a5-91d3-a79e0ba7289b/volumes" Dec 03 09:05:08 crc kubenswrapper[4576]: I1203 09:05:08.308597 4576 generic.go:334] "Generic (PLEG): container finished" podID="e0935bbc-4f84-4c7d-9372-df42c84833bb" containerID="cf183f545285dea5a7f4afb0f0de0490a212148b6f03abcb75b1af121c5153f7" exitCode=0 Dec 03 09:05:08 crc kubenswrapper[4576]: I1203 09:05:08.308710 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nqv7l" event={"ID":"e0935bbc-4f84-4c7d-9372-df42c84833bb","Type":"ContainerDied","Data":"cf183f545285dea5a7f4afb0f0de0490a212148b6f03abcb75b1af121c5153f7"} Dec 03 09:05:09 crc kubenswrapper[4576]: I1203 09:05:09.319302 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nqv7l" event={"ID":"e0935bbc-4f84-4c7d-9372-df42c84833bb","Type":"ContainerStarted","Data":"91f07b4067b7cd3ce0961c7326a67ed6b5b0c1ae4242e5f8a7443990592e2c86"} Dec 03 09:05:09 crc kubenswrapper[4576]: I1203 09:05:09.321780 4576 generic.go:334] "Generic (PLEG): container finished" podID="059fa16e-ef61-475d-927a-8b6fe7ed5c81" containerID="aab59fe80d35bff4bc57d13bb15a0851ae9a632a895f8dc4261d3b0978f8a277" exitCode=0 Dec 03 09:05:09 crc kubenswrapper[4576]: I1203 09:05:09.321913 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc" event={"ID":"059fa16e-ef61-475d-927a-8b6fe7ed5c81","Type":"ContainerDied","Data":"aab59fe80d35bff4bc57d13bb15a0851ae9a632a895f8dc4261d3b0978f8a277"} Dec 03 09:05:09 crc kubenswrapper[4576]: I1203 09:05:09.349408 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-nqv7l" podStartSLOduration=2.714890043 podStartE2EDuration="5.349264364s" podCreationTimestamp="2025-12-03 09:05:04 +0000 UTC" firstStartedPulling="2025-12-03 09:05:06.285549525 +0000 UTC m=+1513.671526509" lastFinishedPulling="2025-12-03 09:05:08.919923846 +0000 UTC m=+1516.305900830" observedRunningTime="2025-12-03 09:05:09.342396688 +0000 UTC m=+1516.728373672" watchObservedRunningTime="2025-12-03 09:05:09.349264364 +0000 UTC m=+1516.735241388" Dec 03 09:05:09 crc kubenswrapper[4576]: I1203 09:05:09.680791 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 09:05:09 crc kubenswrapper[4576]: I1203 09:05:09.681071 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 09:05:10 crc kubenswrapper[4576]: I1203 09:05:10.330543 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc" 
event={"ID":"059fa16e-ef61-475d-927a-8b6fe7ed5c81","Type":"ContainerStarted","Data":"ace395093863e7e6f3e772842fbead04b1b5e98f4c7577abbd13f89e62423151"} Dec 03 09:05:11 crc kubenswrapper[4576]: I1203 09:05:11.338635 4576 generic.go:334] "Generic (PLEG): container finished" podID="059fa16e-ef61-475d-927a-8b6fe7ed5c81" containerID="ace395093863e7e6f3e772842fbead04b1b5e98f4c7577abbd13f89e62423151" exitCode=0 Dec 03 09:05:11 crc kubenswrapper[4576]: I1203 09:05:11.338671 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc" event={"ID":"059fa16e-ef61-475d-927a-8b6fe7ed5c81","Type":"ContainerDied","Data":"ace395093863e7e6f3e772842fbead04b1b5e98f4c7577abbd13f89e62423151"} Dec 03 09:05:12 crc kubenswrapper[4576]: I1203 09:05:12.608311 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc" Dec 03 09:05:12 crc kubenswrapper[4576]: I1203 09:05:12.687166 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/059fa16e-ef61-475d-927a-8b6fe7ed5c81-util\") pod \"059fa16e-ef61-475d-927a-8b6fe7ed5c81\" (UID: \"059fa16e-ef61-475d-927a-8b6fe7ed5c81\") " Dec 03 09:05:12 crc kubenswrapper[4576]: I1203 09:05:12.687235 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rhwlr\" (UniqueName: \"kubernetes.io/projected/059fa16e-ef61-475d-927a-8b6fe7ed5c81-kube-api-access-rhwlr\") pod \"059fa16e-ef61-475d-927a-8b6fe7ed5c81\" (UID: \"059fa16e-ef61-475d-927a-8b6fe7ed5c81\") " Dec 03 09:05:12 crc kubenswrapper[4576]: I1203 09:05:12.687313 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/059fa16e-ef61-475d-927a-8b6fe7ed5c81-bundle\") pod \"059fa16e-ef61-475d-927a-8b6fe7ed5c81\" (UID: \"059fa16e-ef61-475d-927a-8b6fe7ed5c81\") " Dec 03 09:05:12 crc kubenswrapper[4576]: I1203 09:05:12.688895 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/059fa16e-ef61-475d-927a-8b6fe7ed5c81-bundle" (OuterVolumeSpecName: "bundle") pod "059fa16e-ef61-475d-927a-8b6fe7ed5c81" (UID: "059fa16e-ef61-475d-927a-8b6fe7ed5c81"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:05:12 crc kubenswrapper[4576]: I1203 09:05:12.693711 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/059fa16e-ef61-475d-927a-8b6fe7ed5c81-kube-api-access-rhwlr" (OuterVolumeSpecName: "kube-api-access-rhwlr") pod "059fa16e-ef61-475d-927a-8b6fe7ed5c81" (UID: "059fa16e-ef61-475d-927a-8b6fe7ed5c81"). InnerVolumeSpecName "kube-api-access-rhwlr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:05:12 crc kubenswrapper[4576]: I1203 09:05:12.788419 4576 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/059fa16e-ef61-475d-927a-8b6fe7ed5c81-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:05:12 crc kubenswrapper[4576]: I1203 09:05:12.788452 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rhwlr\" (UniqueName: \"kubernetes.io/projected/059fa16e-ef61-475d-927a-8b6fe7ed5c81-kube-api-access-rhwlr\") on node \"crc\" DevicePath \"\"" Dec 03 09:05:12 crc kubenswrapper[4576]: I1203 09:05:12.936851 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/059fa16e-ef61-475d-927a-8b6fe7ed5c81-util" (OuterVolumeSpecName: "util") pod "059fa16e-ef61-475d-927a-8b6fe7ed5c81" (UID: "059fa16e-ef61-475d-927a-8b6fe7ed5c81"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:05:12 crc kubenswrapper[4576]: I1203 09:05:12.990734 4576 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/059fa16e-ef61-475d-927a-8b6fe7ed5c81-util\") on node \"crc\" DevicePath \"\"" Dec 03 09:05:13 crc kubenswrapper[4576]: I1203 09:05:13.360783 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc" event={"ID":"059fa16e-ef61-475d-927a-8b6fe7ed5c81","Type":"ContainerDied","Data":"638fd37c4d58e7b3cda37ef9f31cae84ba246d52f1942d4353b7df133f6bf15c"} Dec 03 09:05:13 crc kubenswrapper[4576]: I1203 09:05:13.360881 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc" Dec 03 09:05:13 crc kubenswrapper[4576]: I1203 09:05:13.360823 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="638fd37c4d58e7b3cda37ef9f31cae84ba246d52f1942d4353b7df133f6bf15c" Dec 03 09:05:14 crc kubenswrapper[4576]: I1203 09:05:14.832050 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-nqv7l" Dec 03 09:05:14 crc kubenswrapper[4576]: I1203 09:05:14.832442 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-nqv7l" Dec 03 09:05:14 crc kubenswrapper[4576]: I1203 09:05:14.892688 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-nqv7l" Dec 03 09:05:15 crc kubenswrapper[4576]: I1203 09:05:15.416087 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-nqv7l" Dec 03 09:05:16 crc kubenswrapper[4576]: I1203 09:05:16.272766 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-tkj2w"] Dec 03 09:05:16 crc kubenswrapper[4576]: E1203 09:05:16.273363 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="059fa16e-ef61-475d-927a-8b6fe7ed5c81" containerName="pull" Dec 03 09:05:16 crc kubenswrapper[4576]: I1203 09:05:16.273383 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="059fa16e-ef61-475d-927a-8b6fe7ed5c81" containerName="pull" Dec 03 09:05:16 crc kubenswrapper[4576]: E1203 09:05:16.273418 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="059fa16e-ef61-475d-927a-8b6fe7ed5c81" 
containerName="util" Dec 03 09:05:16 crc kubenswrapper[4576]: I1203 09:05:16.273425 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="059fa16e-ef61-475d-927a-8b6fe7ed5c81" containerName="util" Dec 03 09:05:16 crc kubenswrapper[4576]: E1203 09:05:16.273434 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3cb91673-2622-41a5-91d3-a79e0ba7289b" containerName="console" Dec 03 09:05:16 crc kubenswrapper[4576]: I1203 09:05:16.273443 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="3cb91673-2622-41a5-91d3-a79e0ba7289b" containerName="console" Dec 03 09:05:16 crc kubenswrapper[4576]: E1203 09:05:16.273456 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="059fa16e-ef61-475d-927a-8b6fe7ed5c81" containerName="extract" Dec 03 09:05:16 crc kubenswrapper[4576]: I1203 09:05:16.273462 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="059fa16e-ef61-475d-927a-8b6fe7ed5c81" containerName="extract" Dec 03 09:05:16 crc kubenswrapper[4576]: I1203 09:05:16.273613 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="3cb91673-2622-41a5-91d3-a79e0ba7289b" containerName="console" Dec 03 09:05:16 crc kubenswrapper[4576]: I1203 09:05:16.273640 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="059fa16e-ef61-475d-927a-8b6fe7ed5c81" containerName="extract" Dec 03 09:05:16 crc kubenswrapper[4576]: I1203 09:05:16.274550 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tkj2w" Dec 03 09:05:16 crc kubenswrapper[4576]: I1203 09:05:16.313703 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tkj2w"] Dec 03 09:05:16 crc kubenswrapper[4576]: I1203 09:05:16.357692 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ed77050-8fa6-4b6f-ae98-27731822eb16-catalog-content\") pod \"certified-operators-tkj2w\" (UID: \"6ed77050-8fa6-4b6f-ae98-27731822eb16\") " pod="openshift-marketplace/certified-operators-tkj2w" Dec 03 09:05:16 crc kubenswrapper[4576]: I1203 09:05:16.357774 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ed77050-8fa6-4b6f-ae98-27731822eb16-utilities\") pod \"certified-operators-tkj2w\" (UID: \"6ed77050-8fa6-4b6f-ae98-27731822eb16\") " pod="openshift-marketplace/certified-operators-tkj2w" Dec 03 09:05:16 crc kubenswrapper[4576]: I1203 09:05:16.357811 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nkkcz\" (UniqueName: \"kubernetes.io/projected/6ed77050-8fa6-4b6f-ae98-27731822eb16-kube-api-access-nkkcz\") pod \"certified-operators-tkj2w\" (UID: \"6ed77050-8fa6-4b6f-ae98-27731822eb16\") " pod="openshift-marketplace/certified-operators-tkj2w" Dec 03 09:05:16 crc kubenswrapper[4576]: I1203 09:05:16.458577 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ed77050-8fa6-4b6f-ae98-27731822eb16-utilities\") pod \"certified-operators-tkj2w\" (UID: \"6ed77050-8fa6-4b6f-ae98-27731822eb16\") " pod="openshift-marketplace/certified-operators-tkj2w" Dec 03 09:05:16 crc kubenswrapper[4576]: I1203 09:05:16.458667 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nkkcz\" (UniqueName: 
\"kubernetes.io/projected/6ed77050-8fa6-4b6f-ae98-27731822eb16-kube-api-access-nkkcz\") pod \"certified-operators-tkj2w\" (UID: \"6ed77050-8fa6-4b6f-ae98-27731822eb16\") " pod="openshift-marketplace/certified-operators-tkj2w" Dec 03 09:05:16 crc kubenswrapper[4576]: I1203 09:05:16.458731 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ed77050-8fa6-4b6f-ae98-27731822eb16-catalog-content\") pod \"certified-operators-tkj2w\" (UID: \"6ed77050-8fa6-4b6f-ae98-27731822eb16\") " pod="openshift-marketplace/certified-operators-tkj2w" Dec 03 09:05:16 crc kubenswrapper[4576]: I1203 09:05:16.459140 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ed77050-8fa6-4b6f-ae98-27731822eb16-utilities\") pod \"certified-operators-tkj2w\" (UID: \"6ed77050-8fa6-4b6f-ae98-27731822eb16\") " pod="openshift-marketplace/certified-operators-tkj2w" Dec 03 09:05:16 crc kubenswrapper[4576]: I1203 09:05:16.459246 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ed77050-8fa6-4b6f-ae98-27731822eb16-catalog-content\") pod \"certified-operators-tkj2w\" (UID: \"6ed77050-8fa6-4b6f-ae98-27731822eb16\") " pod="openshift-marketplace/certified-operators-tkj2w" Dec 03 09:05:16 crc kubenswrapper[4576]: I1203 09:05:16.485441 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nkkcz\" (UniqueName: \"kubernetes.io/projected/6ed77050-8fa6-4b6f-ae98-27731822eb16-kube-api-access-nkkcz\") pod \"certified-operators-tkj2w\" (UID: \"6ed77050-8fa6-4b6f-ae98-27731822eb16\") " pod="openshift-marketplace/certified-operators-tkj2w" Dec 03 09:05:16 crc kubenswrapper[4576]: I1203 09:05:16.592821 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-tkj2w" Dec 03 09:05:16 crc kubenswrapper[4576]: I1203 09:05:16.867905 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tkj2w"] Dec 03 09:05:17 crc kubenswrapper[4576]: I1203 09:05:17.382244 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tkj2w" event={"ID":"6ed77050-8fa6-4b6f-ae98-27731822eb16","Type":"ContainerStarted","Data":"ce2fcb0d575d406915de43ffed37259dff1d55b4cefeb093f49773f6f8d7f16f"} Dec 03 09:05:18 crc kubenswrapper[4576]: I1203 09:05:18.242244 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nqv7l"] Dec 03 09:05:18 crc kubenswrapper[4576]: I1203 09:05:18.387230 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-nqv7l" podUID="e0935bbc-4f84-4c7d-9372-df42c84833bb" containerName="registry-server" containerID="cri-o://91f07b4067b7cd3ce0961c7326a67ed6b5b0c1ae4242e5f8a7443990592e2c86" gracePeriod=2 Dec 03 09:05:19 crc kubenswrapper[4576]: I1203 09:05:19.392501 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tkj2w" event={"ID":"6ed77050-8fa6-4b6f-ae98-27731822eb16","Type":"ContainerStarted","Data":"7ab14d42df9ea662da3b0e2e4da5ccafabc30893b99f03d5e9fc7a35ba35e33d"} Dec 03 09:05:20 crc kubenswrapper[4576]: I1203 09:05:20.400516 4576 generic.go:334] "Generic (PLEG): container finished" podID="e0935bbc-4f84-4c7d-9372-df42c84833bb" containerID="91f07b4067b7cd3ce0961c7326a67ed6b5b0c1ae4242e5f8a7443990592e2c86" exitCode=0 Dec 03 09:05:20 crc kubenswrapper[4576]: I1203 09:05:20.400579 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nqv7l" event={"ID":"e0935bbc-4f84-4c7d-9372-df42c84833bb","Type":"ContainerDied","Data":"91f07b4067b7cd3ce0961c7326a67ed6b5b0c1ae4242e5f8a7443990592e2c86"} Dec 03 09:05:20 crc kubenswrapper[4576]: I1203 09:05:20.402230 4576 generic.go:334] "Generic (PLEG): container finished" podID="6ed77050-8fa6-4b6f-ae98-27731822eb16" containerID="7ab14d42df9ea662da3b0e2e4da5ccafabc30893b99f03d5e9fc7a35ba35e33d" exitCode=0 Dec 03 09:05:20 crc kubenswrapper[4576]: I1203 09:05:20.402278 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tkj2w" event={"ID":"6ed77050-8fa6-4b6f-ae98-27731822eb16","Type":"ContainerDied","Data":"7ab14d42df9ea662da3b0e2e4da5ccafabc30893b99f03d5e9fc7a35ba35e33d"} Dec 03 09:05:21 crc kubenswrapper[4576]: I1203 09:05:21.126658 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nqv7l" Dec 03 09:05:21 crc kubenswrapper[4576]: I1203 09:05:21.227672 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0935bbc-4f84-4c7d-9372-df42c84833bb-utilities\") pod \"e0935bbc-4f84-4c7d-9372-df42c84833bb\" (UID: \"e0935bbc-4f84-4c7d-9372-df42c84833bb\") " Dec 03 09:05:21 crc kubenswrapper[4576]: I1203 09:05:21.227771 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-99g7p\" (UniqueName: \"kubernetes.io/projected/e0935bbc-4f84-4c7d-9372-df42c84833bb-kube-api-access-99g7p\") pod \"e0935bbc-4f84-4c7d-9372-df42c84833bb\" (UID: \"e0935bbc-4f84-4c7d-9372-df42c84833bb\") " Dec 03 09:05:21 crc kubenswrapper[4576]: I1203 09:05:21.227876 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0935bbc-4f84-4c7d-9372-df42c84833bb-catalog-content\") pod \"e0935bbc-4f84-4c7d-9372-df42c84833bb\" (UID: \"e0935bbc-4f84-4c7d-9372-df42c84833bb\") " Dec 03 09:05:21 crc kubenswrapper[4576]: I1203 09:05:21.228811 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e0935bbc-4f84-4c7d-9372-df42c84833bb-utilities" (OuterVolumeSpecName: "utilities") pod "e0935bbc-4f84-4c7d-9372-df42c84833bb" (UID: "e0935bbc-4f84-4c7d-9372-df42c84833bb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:05:21 crc kubenswrapper[4576]: I1203 09:05:21.243779 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0935bbc-4f84-4c7d-9372-df42c84833bb-kube-api-access-99g7p" (OuterVolumeSpecName: "kube-api-access-99g7p") pod "e0935bbc-4f84-4c7d-9372-df42c84833bb" (UID: "e0935bbc-4f84-4c7d-9372-df42c84833bb"). InnerVolumeSpecName "kube-api-access-99g7p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:05:21 crc kubenswrapper[4576]: I1203 09:05:21.254142 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e0935bbc-4f84-4c7d-9372-df42c84833bb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e0935bbc-4f84-4c7d-9372-df42c84833bb" (UID: "e0935bbc-4f84-4c7d-9372-df42c84833bb"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:05:21 crc kubenswrapper[4576]: I1203 09:05:21.329222 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-99g7p\" (UniqueName: \"kubernetes.io/projected/e0935bbc-4f84-4c7d-9372-df42c84833bb-kube-api-access-99g7p\") on node \"crc\" DevicePath \"\"" Dec 03 09:05:21 crc kubenswrapper[4576]: I1203 09:05:21.329267 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0935bbc-4f84-4c7d-9372-df42c84833bb-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 09:05:21 crc kubenswrapper[4576]: I1203 09:05:21.329278 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0935bbc-4f84-4c7d-9372-df42c84833bb-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 09:05:21 crc kubenswrapper[4576]: I1203 09:05:21.410494 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nqv7l" event={"ID":"e0935bbc-4f84-4c7d-9372-df42c84833bb","Type":"ContainerDied","Data":"215819c7ff0d77aea9dc471855d676919af92737c25e604936c2d3ddb52e11db"} Dec 03 09:05:21 crc kubenswrapper[4576]: I1203 09:05:21.410584 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nqv7l" Dec 03 09:05:21 crc kubenswrapper[4576]: I1203 09:05:21.410590 4576 scope.go:117] "RemoveContainer" containerID="91f07b4067b7cd3ce0961c7326a67ed6b5b0c1ae4242e5f8a7443990592e2c86" Dec 03 09:05:21 crc kubenswrapper[4576]: I1203 09:05:21.455901 4576 scope.go:117] "RemoveContainer" containerID="cf183f545285dea5a7f4afb0f0de0490a212148b6f03abcb75b1af121c5153f7" Dec 03 09:05:21 crc kubenswrapper[4576]: I1203 09:05:21.477850 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nqv7l"] Dec 03 09:05:21 crc kubenswrapper[4576]: I1203 09:05:21.477596 4576 scope.go:117] "RemoveContainer" containerID="c5960a1fac38abd5a7b596d44a6a549fc135d1ee73785bc0e1bef8d2ea480920" Dec 03 09:05:21 crc kubenswrapper[4576]: I1203 09:05:21.482788 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-nqv7l"] Dec 03 09:05:21 crc kubenswrapper[4576]: I1203 09:05:21.684791 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e0935bbc-4f84-4c7d-9372-df42c84833bb" path="/var/lib/kubelet/pods/e0935bbc-4f84-4c7d-9372-df42c84833bb/volumes" Dec 03 09:05:21 crc kubenswrapper[4576]: I1203 09:05:21.777843 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-6b55b5ccff-rm7kb"] Dec 03 09:05:21 crc kubenswrapper[4576]: E1203 09:05:21.778371 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0935bbc-4f84-4c7d-9372-df42c84833bb" containerName="extract-utilities" Dec 03 09:05:21 crc kubenswrapper[4576]: I1203 09:05:21.778396 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0935bbc-4f84-4c7d-9372-df42c84833bb" containerName="extract-utilities" Dec 03 09:05:21 crc kubenswrapper[4576]: E1203 09:05:21.778415 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0935bbc-4f84-4c7d-9372-df42c84833bb" containerName="registry-server" Dec 03 09:05:21 crc kubenswrapper[4576]: I1203 09:05:21.778421 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0935bbc-4f84-4c7d-9372-df42c84833bb" containerName="registry-server" Dec 03 09:05:21 crc kubenswrapper[4576]: E1203 
09:05:21.778431 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0935bbc-4f84-4c7d-9372-df42c84833bb" containerName="extract-content" Dec 03 09:05:21 crc kubenswrapper[4576]: I1203 09:05:21.778437 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0935bbc-4f84-4c7d-9372-df42c84833bb" containerName="extract-content" Dec 03 09:05:21 crc kubenswrapper[4576]: I1203 09:05:21.778569 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0935bbc-4f84-4c7d-9372-df42c84833bb" containerName="registry-server" Dec 03 09:05:21 crc kubenswrapper[4576]: I1203 09:05:21.779005 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-6b55b5ccff-rm7kb" Dec 03 09:05:21 crc kubenswrapper[4576]: I1203 09:05:21.782054 4576 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-9m97b" Dec 03 09:05:21 crc kubenswrapper[4576]: I1203 09:05:21.784239 4576 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Dec 03 09:05:21 crc kubenswrapper[4576]: I1203 09:05:21.788433 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Dec 03 09:05:21 crc kubenswrapper[4576]: I1203 09:05:21.788500 4576 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Dec 03 09:05:21 crc kubenswrapper[4576]: I1203 09:05:21.788510 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Dec 03 09:05:21 crc kubenswrapper[4576]: I1203 09:05:21.800658 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-6b55b5ccff-rm7kb"] Dec 03 09:05:21 crc kubenswrapper[4576]: I1203 09:05:21.843185 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4641f310-7d84-4f47-8250-5551fc71ae77-apiservice-cert\") pod \"metallb-operator-controller-manager-6b55b5ccff-rm7kb\" (UID: \"4641f310-7d84-4f47-8250-5551fc71ae77\") " pod="metallb-system/metallb-operator-controller-manager-6b55b5ccff-rm7kb" Dec 03 09:05:21 crc kubenswrapper[4576]: I1203 09:05:21.843260 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j2fbv\" (UniqueName: \"kubernetes.io/projected/4641f310-7d84-4f47-8250-5551fc71ae77-kube-api-access-j2fbv\") pod \"metallb-operator-controller-manager-6b55b5ccff-rm7kb\" (UID: \"4641f310-7d84-4f47-8250-5551fc71ae77\") " pod="metallb-system/metallb-operator-controller-manager-6b55b5ccff-rm7kb" Dec 03 09:05:21 crc kubenswrapper[4576]: I1203 09:05:21.843295 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4641f310-7d84-4f47-8250-5551fc71ae77-webhook-cert\") pod \"metallb-operator-controller-manager-6b55b5ccff-rm7kb\" (UID: \"4641f310-7d84-4f47-8250-5551fc71ae77\") " pod="metallb-system/metallb-operator-controller-manager-6b55b5ccff-rm7kb" Dec 03 09:05:21 crc kubenswrapper[4576]: I1203 09:05:21.944345 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j2fbv\" (UniqueName: \"kubernetes.io/projected/4641f310-7d84-4f47-8250-5551fc71ae77-kube-api-access-j2fbv\") pod 
\"metallb-operator-controller-manager-6b55b5ccff-rm7kb\" (UID: \"4641f310-7d84-4f47-8250-5551fc71ae77\") " pod="metallb-system/metallb-operator-controller-manager-6b55b5ccff-rm7kb" Dec 03 09:05:21 crc kubenswrapper[4576]: I1203 09:05:21.944429 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4641f310-7d84-4f47-8250-5551fc71ae77-webhook-cert\") pod \"metallb-operator-controller-manager-6b55b5ccff-rm7kb\" (UID: \"4641f310-7d84-4f47-8250-5551fc71ae77\") " pod="metallb-system/metallb-operator-controller-manager-6b55b5ccff-rm7kb" Dec 03 09:05:21 crc kubenswrapper[4576]: I1203 09:05:21.944471 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4641f310-7d84-4f47-8250-5551fc71ae77-apiservice-cert\") pod \"metallb-operator-controller-manager-6b55b5ccff-rm7kb\" (UID: \"4641f310-7d84-4f47-8250-5551fc71ae77\") " pod="metallb-system/metallb-operator-controller-manager-6b55b5ccff-rm7kb" Dec 03 09:05:21 crc kubenswrapper[4576]: I1203 09:05:21.952386 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4641f310-7d84-4f47-8250-5551fc71ae77-webhook-cert\") pod \"metallb-operator-controller-manager-6b55b5ccff-rm7kb\" (UID: \"4641f310-7d84-4f47-8250-5551fc71ae77\") " pod="metallb-system/metallb-operator-controller-manager-6b55b5ccff-rm7kb" Dec 03 09:05:21 crc kubenswrapper[4576]: I1203 09:05:21.952839 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4641f310-7d84-4f47-8250-5551fc71ae77-apiservice-cert\") pod \"metallb-operator-controller-manager-6b55b5ccff-rm7kb\" (UID: \"4641f310-7d84-4f47-8250-5551fc71ae77\") " pod="metallb-system/metallb-operator-controller-manager-6b55b5ccff-rm7kb" Dec 03 09:05:21 crc kubenswrapper[4576]: I1203 09:05:21.975130 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j2fbv\" (UniqueName: \"kubernetes.io/projected/4641f310-7d84-4f47-8250-5551fc71ae77-kube-api-access-j2fbv\") pod \"metallb-operator-controller-manager-6b55b5ccff-rm7kb\" (UID: \"4641f310-7d84-4f47-8250-5551fc71ae77\") " pod="metallb-system/metallb-operator-controller-manager-6b55b5ccff-rm7kb" Dec 03 09:05:22 crc kubenswrapper[4576]: I1203 09:05:22.015068 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-7fdfc49798-njl4b"] Dec 03 09:05:22 crc kubenswrapper[4576]: I1203 09:05:22.015834 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-7fdfc49798-njl4b" Dec 03 09:05:22 crc kubenswrapper[4576]: I1203 09:05:22.018266 4576 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Dec 03 09:05:22 crc kubenswrapper[4576]: I1203 09:05:22.018279 4576 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 03 09:05:22 crc kubenswrapper[4576]: I1203 09:05:22.018703 4576 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-xjf5j" Dec 03 09:05:22 crc kubenswrapper[4576]: I1203 09:05:22.035556 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-7fdfc49798-njl4b"] Dec 03 09:05:22 crc kubenswrapper[4576]: I1203 09:05:22.091970 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-6b55b5ccff-rm7kb" Dec 03 09:05:22 crc kubenswrapper[4576]: I1203 09:05:22.146834 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8f7f1ce8-dc0b-4508-a9aa-2527f55973ff-webhook-cert\") pod \"metallb-operator-webhook-server-7fdfc49798-njl4b\" (UID: \"8f7f1ce8-dc0b-4508-a9aa-2527f55973ff\") " pod="metallb-system/metallb-operator-webhook-server-7fdfc49798-njl4b" Dec 03 09:05:22 crc kubenswrapper[4576]: I1203 09:05:22.146910 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8f7f1ce8-dc0b-4508-a9aa-2527f55973ff-apiservice-cert\") pod \"metallb-operator-webhook-server-7fdfc49798-njl4b\" (UID: \"8f7f1ce8-dc0b-4508-a9aa-2527f55973ff\") " pod="metallb-system/metallb-operator-webhook-server-7fdfc49798-njl4b" Dec 03 09:05:22 crc kubenswrapper[4576]: I1203 09:05:22.146940 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gl7xf\" (UniqueName: \"kubernetes.io/projected/8f7f1ce8-dc0b-4508-a9aa-2527f55973ff-kube-api-access-gl7xf\") pod \"metallb-operator-webhook-server-7fdfc49798-njl4b\" (UID: \"8f7f1ce8-dc0b-4508-a9aa-2527f55973ff\") " pod="metallb-system/metallb-operator-webhook-server-7fdfc49798-njl4b" Dec 03 09:05:22 crc kubenswrapper[4576]: I1203 09:05:22.252936 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8f7f1ce8-dc0b-4508-a9aa-2527f55973ff-webhook-cert\") pod \"metallb-operator-webhook-server-7fdfc49798-njl4b\" (UID: \"8f7f1ce8-dc0b-4508-a9aa-2527f55973ff\") " pod="metallb-system/metallb-operator-webhook-server-7fdfc49798-njl4b" Dec 03 09:05:22 crc kubenswrapper[4576]: I1203 09:05:22.253290 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8f7f1ce8-dc0b-4508-a9aa-2527f55973ff-apiservice-cert\") pod \"metallb-operator-webhook-server-7fdfc49798-njl4b\" (UID: \"8f7f1ce8-dc0b-4508-a9aa-2527f55973ff\") " pod="metallb-system/metallb-operator-webhook-server-7fdfc49798-njl4b" Dec 03 09:05:22 crc kubenswrapper[4576]: I1203 09:05:22.253322 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gl7xf\" (UniqueName: \"kubernetes.io/projected/8f7f1ce8-dc0b-4508-a9aa-2527f55973ff-kube-api-access-gl7xf\") pod 
\"metallb-operator-webhook-server-7fdfc49798-njl4b\" (UID: \"8f7f1ce8-dc0b-4508-a9aa-2527f55973ff\") " pod="metallb-system/metallb-operator-webhook-server-7fdfc49798-njl4b" Dec 03 09:05:22 crc kubenswrapper[4576]: I1203 09:05:22.259580 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8f7f1ce8-dc0b-4508-a9aa-2527f55973ff-apiservice-cert\") pod \"metallb-operator-webhook-server-7fdfc49798-njl4b\" (UID: \"8f7f1ce8-dc0b-4508-a9aa-2527f55973ff\") " pod="metallb-system/metallb-operator-webhook-server-7fdfc49798-njl4b" Dec 03 09:05:22 crc kubenswrapper[4576]: I1203 09:05:22.262254 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8f7f1ce8-dc0b-4508-a9aa-2527f55973ff-webhook-cert\") pod \"metallb-operator-webhook-server-7fdfc49798-njl4b\" (UID: \"8f7f1ce8-dc0b-4508-a9aa-2527f55973ff\") " pod="metallb-system/metallb-operator-webhook-server-7fdfc49798-njl4b" Dec 03 09:05:22 crc kubenswrapper[4576]: I1203 09:05:22.288503 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gl7xf\" (UniqueName: \"kubernetes.io/projected/8f7f1ce8-dc0b-4508-a9aa-2527f55973ff-kube-api-access-gl7xf\") pod \"metallb-operator-webhook-server-7fdfc49798-njl4b\" (UID: \"8f7f1ce8-dc0b-4508-a9aa-2527f55973ff\") " pod="metallb-system/metallb-operator-webhook-server-7fdfc49798-njl4b" Dec 03 09:05:22 crc kubenswrapper[4576]: I1203 09:05:22.329678 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-7fdfc49798-njl4b" Dec 03 09:05:22 crc kubenswrapper[4576]: I1203 09:05:22.407079 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-6b55b5ccff-rm7kb"] Dec 03 09:05:22 crc kubenswrapper[4576]: I1203 09:05:22.474265 4576 generic.go:334] "Generic (PLEG): container finished" podID="6ed77050-8fa6-4b6f-ae98-27731822eb16" containerID="6cc96e801b82042cae02bb456263539c23cd83831896adcce8ac67f8fb9de325" exitCode=0 Dec 03 09:05:22 crc kubenswrapper[4576]: I1203 09:05:22.474320 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tkj2w" event={"ID":"6ed77050-8fa6-4b6f-ae98-27731822eb16","Type":"ContainerDied","Data":"6cc96e801b82042cae02bb456263539c23cd83831896adcce8ac67f8fb9de325"} Dec 03 09:05:22 crc kubenswrapper[4576]: I1203 09:05:22.728680 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-7fdfc49798-njl4b"] Dec 03 09:05:23 crc kubenswrapper[4576]: I1203 09:05:23.486746 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-7fdfc49798-njl4b" event={"ID":"8f7f1ce8-dc0b-4508-a9aa-2527f55973ff","Type":"ContainerStarted","Data":"2d857d8764f4a83a725c8a4a07378c0525d2e461d90853b65174c62e7783e95c"} Dec 03 09:05:23 crc kubenswrapper[4576]: I1203 09:05:23.498088 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tkj2w" event={"ID":"6ed77050-8fa6-4b6f-ae98-27731822eb16","Type":"ContainerStarted","Data":"f6743dc3685176703428b3ce3f2f895561c20a3115e5bbdc0bea8d7b5734cdca"} Dec 03 09:05:23 crc kubenswrapper[4576]: I1203 09:05:23.502142 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-6b55b5ccff-rm7kb" 
event={"ID":"4641f310-7d84-4f47-8250-5551fc71ae77","Type":"ContainerStarted","Data":"0aae3635e23e8a707f6b7c848b23511baacd487125535242ca1b2e46a6589f57"} Dec 03 09:05:23 crc kubenswrapper[4576]: I1203 09:05:23.521222 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-tkj2w" podStartSLOduration=4.9794024740000005 podStartE2EDuration="7.52120394s" podCreationTimestamp="2025-12-03 09:05:16 +0000 UTC" firstStartedPulling="2025-12-03 09:05:20.403760588 +0000 UTC m=+1527.789737572" lastFinishedPulling="2025-12-03 09:05:22.945562054 +0000 UTC m=+1530.331539038" observedRunningTime="2025-12-03 09:05:23.520911122 +0000 UTC m=+1530.906888106" watchObservedRunningTime="2025-12-03 09:05:23.52120394 +0000 UTC m=+1530.907180924" Dec 03 09:05:26 crc kubenswrapper[4576]: I1203 09:05:26.594062 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-tkj2w" Dec 03 09:05:26 crc kubenswrapper[4576]: I1203 09:05:26.594664 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-tkj2w" Dec 03 09:05:26 crc kubenswrapper[4576]: I1203 09:05:26.760353 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-tkj2w" Dec 03 09:05:31 crc kubenswrapper[4576]: I1203 09:05:31.655233 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-7fdfc49798-njl4b" event={"ID":"8f7f1ce8-dc0b-4508-a9aa-2527f55973ff","Type":"ContainerStarted","Data":"d44d3dabd9445586eec15c2b6c4f40b44ca30515b8d104b884bebca2f5f5f65a"} Dec 03 09:05:31 crc kubenswrapper[4576]: I1203 09:05:31.656586 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-6b55b5ccff-rm7kb" event={"ID":"4641f310-7d84-4f47-8250-5551fc71ae77","Type":"ContainerStarted","Data":"6ef16b67ae0d0d8240d2f948508d107a30e83b668c097bf577acbf91da02e6bd"} Dec 03 09:05:32 crc kubenswrapper[4576]: I1203 09:05:32.661960 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-6b55b5ccff-rm7kb" Dec 03 09:05:32 crc kubenswrapper[4576]: I1203 09:05:32.662558 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-7fdfc49798-njl4b" Dec 03 09:05:32 crc kubenswrapper[4576]: I1203 09:05:32.689955 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-6b55b5ccff-rm7kb" podStartSLOduration=3.540030209 podStartE2EDuration="11.68993387s" podCreationTimestamp="2025-12-03 09:05:21 +0000 UTC" firstStartedPulling="2025-12-03 09:05:22.468559008 +0000 UTC m=+1529.854535992" lastFinishedPulling="2025-12-03 09:05:30.618462669 +0000 UTC m=+1538.004439653" observedRunningTime="2025-12-03 09:05:32.686843976 +0000 UTC m=+1540.072820980" watchObservedRunningTime="2025-12-03 09:05:32.68993387 +0000 UTC m=+1540.075910854" Dec 03 09:05:32 crc kubenswrapper[4576]: I1203 09:05:32.711543 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-7fdfc49798-njl4b" podStartSLOduration=3.827841126 podStartE2EDuration="11.711501253s" podCreationTimestamp="2025-12-03 09:05:21 +0000 UTC" firstStartedPulling="2025-12-03 09:05:22.741693569 +0000 UTC m=+1530.127670563" lastFinishedPulling="2025-12-03 
09:05:30.625353696 +0000 UTC m=+1538.011330690" observedRunningTime="2025-12-03 09:05:32.705963023 +0000 UTC m=+1540.091940017" watchObservedRunningTime="2025-12-03 09:05:32.711501253 +0000 UTC m=+1540.097478237" Dec 03 09:05:36 crc kubenswrapper[4576]: I1203 09:05:36.677469 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-tkj2w" Dec 03 09:05:39 crc kubenswrapper[4576]: I1203 09:05:39.133579 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tkj2w"] Dec 03 09:05:39 crc kubenswrapper[4576]: I1203 09:05:39.134019 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-tkj2w" podUID="6ed77050-8fa6-4b6f-ae98-27731822eb16" containerName="registry-server" containerID="cri-o://f6743dc3685176703428b3ce3f2f895561c20a3115e5bbdc0bea8d7b5734cdca" gracePeriod=2 Dec 03 09:05:39 crc kubenswrapper[4576]: I1203 09:05:39.680485 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 09:05:39 crc kubenswrapper[4576]: I1203 09:05:39.680942 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 09:05:42 crc kubenswrapper[4576]: I1203 09:05:42.339301 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-7fdfc49798-njl4b" Dec 03 09:05:42 crc kubenswrapper[4576]: I1203 09:05:42.774673 4576 generic.go:334] "Generic (PLEG): container finished" podID="6ed77050-8fa6-4b6f-ae98-27731822eb16" containerID="f6743dc3685176703428b3ce3f2f895561c20a3115e5bbdc0bea8d7b5734cdca" exitCode=0 Dec 03 09:05:42 crc kubenswrapper[4576]: I1203 09:05:42.774920 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tkj2w" event={"ID":"6ed77050-8fa6-4b6f-ae98-27731822eb16","Type":"ContainerDied","Data":"f6743dc3685176703428b3ce3f2f895561c20a3115e5bbdc0bea8d7b5734cdca"} Dec 03 09:05:43 crc kubenswrapper[4576]: I1203 09:05:43.289901 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-tkj2w" Dec 03 09:05:43 crc kubenswrapper[4576]: I1203 09:05:43.375100 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nkkcz\" (UniqueName: \"kubernetes.io/projected/6ed77050-8fa6-4b6f-ae98-27731822eb16-kube-api-access-nkkcz\") pod \"6ed77050-8fa6-4b6f-ae98-27731822eb16\" (UID: \"6ed77050-8fa6-4b6f-ae98-27731822eb16\") " Dec 03 09:05:43 crc kubenswrapper[4576]: I1203 09:05:43.375169 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ed77050-8fa6-4b6f-ae98-27731822eb16-catalog-content\") pod \"6ed77050-8fa6-4b6f-ae98-27731822eb16\" (UID: \"6ed77050-8fa6-4b6f-ae98-27731822eb16\") " Dec 03 09:05:43 crc kubenswrapper[4576]: I1203 09:05:43.375229 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ed77050-8fa6-4b6f-ae98-27731822eb16-utilities\") pod \"6ed77050-8fa6-4b6f-ae98-27731822eb16\" (UID: \"6ed77050-8fa6-4b6f-ae98-27731822eb16\") " Dec 03 09:05:43 crc kubenswrapper[4576]: I1203 09:05:43.376491 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6ed77050-8fa6-4b6f-ae98-27731822eb16-utilities" (OuterVolumeSpecName: "utilities") pod "6ed77050-8fa6-4b6f-ae98-27731822eb16" (UID: "6ed77050-8fa6-4b6f-ae98-27731822eb16"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:05:43 crc kubenswrapper[4576]: I1203 09:05:43.390474 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ed77050-8fa6-4b6f-ae98-27731822eb16-kube-api-access-nkkcz" (OuterVolumeSpecName: "kube-api-access-nkkcz") pod "6ed77050-8fa6-4b6f-ae98-27731822eb16" (UID: "6ed77050-8fa6-4b6f-ae98-27731822eb16"). InnerVolumeSpecName "kube-api-access-nkkcz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:05:43 crc kubenswrapper[4576]: I1203 09:05:43.422042 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6ed77050-8fa6-4b6f-ae98-27731822eb16-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6ed77050-8fa6-4b6f-ae98-27731822eb16" (UID: "6ed77050-8fa6-4b6f-ae98-27731822eb16"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:05:43 crc kubenswrapper[4576]: I1203 09:05:43.476554 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nkkcz\" (UniqueName: \"kubernetes.io/projected/6ed77050-8fa6-4b6f-ae98-27731822eb16-kube-api-access-nkkcz\") on node \"crc\" DevicePath \"\"" Dec 03 09:05:43 crc kubenswrapper[4576]: I1203 09:05:43.476588 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ed77050-8fa6-4b6f-ae98-27731822eb16-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 09:05:43 crc kubenswrapper[4576]: I1203 09:05:43.476600 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ed77050-8fa6-4b6f-ae98-27731822eb16-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 09:05:43 crc kubenswrapper[4576]: I1203 09:05:43.782923 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tkj2w" event={"ID":"6ed77050-8fa6-4b6f-ae98-27731822eb16","Type":"ContainerDied","Data":"ce2fcb0d575d406915de43ffed37259dff1d55b4cefeb093f49773f6f8d7f16f"} Dec 03 09:05:43 crc kubenswrapper[4576]: I1203 09:05:43.783024 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tkj2w" Dec 03 09:05:43 crc kubenswrapper[4576]: I1203 09:05:43.783048 4576 scope.go:117] "RemoveContainer" containerID="f6743dc3685176703428b3ce3f2f895561c20a3115e5bbdc0bea8d7b5734cdca" Dec 03 09:05:43 crc kubenswrapper[4576]: I1203 09:05:43.800847 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tkj2w"] Dec 03 09:05:43 crc kubenswrapper[4576]: I1203 09:05:43.806664 4576 scope.go:117] "RemoveContainer" containerID="6cc96e801b82042cae02bb456263539c23cd83831896adcce8ac67f8fb9de325" Dec 03 09:05:43 crc kubenswrapper[4576]: I1203 09:05:43.808847 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-tkj2w"] Dec 03 09:05:43 crc kubenswrapper[4576]: I1203 09:05:43.825306 4576 scope.go:117] "RemoveContainer" containerID="7ab14d42df9ea662da3b0e2e4da5ccafabc30893b99f03d5e9fc7a35ba35e33d" Dec 03 09:05:45 crc kubenswrapper[4576]: I1203 09:05:45.698228 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ed77050-8fa6-4b6f-ae98-27731822eb16" path="/var/lib/kubelet/pods/6ed77050-8fa6-4b6f-ae98-27731822eb16/volumes" Dec 03 09:06:02 crc kubenswrapper[4576]: I1203 09:06:02.098137 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-6b55b5ccff-rm7kb" Dec 03 09:06:02 crc kubenswrapper[4576]: I1203 09:06:02.994771 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-jhd6k"] Dec 03 09:06:02 crc kubenswrapper[4576]: E1203 09:06:02.995154 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ed77050-8fa6-4b6f-ae98-27731822eb16" containerName="extract-content" Dec 03 09:06:02 crc kubenswrapper[4576]: I1203 09:06:02.995177 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ed77050-8fa6-4b6f-ae98-27731822eb16" containerName="extract-content" Dec 03 09:06:02 crc kubenswrapper[4576]: E1203 09:06:02.995205 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ed77050-8fa6-4b6f-ae98-27731822eb16" containerName="registry-server" Dec 03 09:06:02 crc kubenswrapper[4576]: I1203 09:06:02.995214 4576 
state_mem.go:107] "Deleted CPUSet assignment" podUID="6ed77050-8fa6-4b6f-ae98-27731822eb16" containerName="registry-server" Dec 03 09:06:02 crc kubenswrapper[4576]: E1203 09:06:02.995234 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ed77050-8fa6-4b6f-ae98-27731822eb16" containerName="extract-utilities" Dec 03 09:06:02 crc kubenswrapper[4576]: I1203 09:06:02.995242 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ed77050-8fa6-4b6f-ae98-27731822eb16" containerName="extract-utilities" Dec 03 09:06:02 crc kubenswrapper[4576]: I1203 09:06:02.995381 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ed77050-8fa6-4b6f-ae98-27731822eb16" containerName="registry-server" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.008906 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-rjsk6"] Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.009376 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-jhd6k" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.010013 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-rjsk6" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.018818 4576 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.019964 4576 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-mvzdd" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.020378 4576 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.020573 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.022069 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-rjsk6"] Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.102654 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f893c93f-566c-4c25-8e2b-48c3d73ca5fd-metrics-certs\") pod \"frr-k8s-jhd6k\" (UID: \"f893c93f-566c-4c25-8e2b-48c3d73ca5fd\") " pod="metallb-system/frr-k8s-jhd6k" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.103058 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/f893c93f-566c-4c25-8e2b-48c3d73ca5fd-frr-startup\") pod \"frr-k8s-jhd6k\" (UID: \"f893c93f-566c-4c25-8e2b-48c3d73ca5fd\") " pod="metallb-system/frr-k8s-jhd6k" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.103087 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cq6rx\" (UniqueName: \"kubernetes.io/projected/f893c93f-566c-4c25-8e2b-48c3d73ca5fd-kube-api-access-cq6rx\") pod \"frr-k8s-jhd6k\" (UID: \"f893c93f-566c-4c25-8e2b-48c3d73ca5fd\") " pod="metallb-system/frr-k8s-jhd6k" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.103117 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: 
\"kubernetes.io/empty-dir/f893c93f-566c-4c25-8e2b-48c3d73ca5fd-metrics\") pod \"frr-k8s-jhd6k\" (UID: \"f893c93f-566c-4c25-8e2b-48c3d73ca5fd\") " pod="metallb-system/frr-k8s-jhd6k" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.103261 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/f893c93f-566c-4c25-8e2b-48c3d73ca5fd-frr-sockets\") pod \"frr-k8s-jhd6k\" (UID: \"f893c93f-566c-4c25-8e2b-48c3d73ca5fd\") " pod="metallb-system/frr-k8s-jhd6k" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.103291 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7w8v9\" (UniqueName: \"kubernetes.io/projected/ea79aacc-f31f-43cd-a0a4-151d9a4703e4-kube-api-access-7w8v9\") pod \"frr-k8s-webhook-server-7fcb986d4-rjsk6\" (UID: \"ea79aacc-f31f-43cd-a0a4-151d9a4703e4\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-rjsk6" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.103332 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ea79aacc-f31f-43cd-a0a4-151d9a4703e4-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-rjsk6\" (UID: \"ea79aacc-f31f-43cd-a0a4-151d9a4703e4\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-rjsk6" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.103355 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/f893c93f-566c-4c25-8e2b-48c3d73ca5fd-frr-conf\") pod \"frr-k8s-jhd6k\" (UID: \"f893c93f-566c-4c25-8e2b-48c3d73ca5fd\") " pod="metallb-system/frr-k8s-jhd6k" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.103375 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/f893c93f-566c-4c25-8e2b-48c3d73ca5fd-reloader\") pod \"frr-k8s-jhd6k\" (UID: \"f893c93f-566c-4c25-8e2b-48c3d73ca5fd\") " pod="metallb-system/frr-k8s-jhd6k" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.115845 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-8244f"] Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.117351 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-8244f" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.125240 4576 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-hxjzd" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.125465 4576 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.125622 4576 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.129310 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.146732 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-9fpwm"] Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.147903 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-9fpwm" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.150265 4576 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.160044 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-9fpwm"] Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.204731 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cq6rx\" (UniqueName: \"kubernetes.io/projected/f893c93f-566c-4c25-8e2b-48c3d73ca5fd-kube-api-access-cq6rx\") pod \"frr-k8s-jhd6k\" (UID: \"f893c93f-566c-4c25-8e2b-48c3d73ca5fd\") " pod="metallb-system/frr-k8s-jhd6k" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.204785 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/f893c93f-566c-4c25-8e2b-48c3d73ca5fd-frr-startup\") pod \"frr-k8s-jhd6k\" (UID: \"f893c93f-566c-4c25-8e2b-48c3d73ca5fd\") " pod="metallb-system/frr-k8s-jhd6k" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.204814 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/f893c93f-566c-4c25-8e2b-48c3d73ca5fd-metrics\") pod \"frr-k8s-jhd6k\" (UID: \"f893c93f-566c-4c25-8e2b-48c3d73ca5fd\") " pod="metallb-system/frr-k8s-jhd6k" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.204863 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/f893c93f-566c-4c25-8e2b-48c3d73ca5fd-frr-sockets\") pod \"frr-k8s-jhd6k\" (UID: \"f893c93f-566c-4c25-8e2b-48c3d73ca5fd\") " pod="metallb-system/frr-k8s-jhd6k" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.204884 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8sxt\" (UniqueName: \"kubernetes.io/projected/7dc75631-a77b-4157-9abb-ba8ea06fb5dd-kube-api-access-m8sxt\") pod \"speaker-8244f\" (UID: \"7dc75631-a77b-4157-9abb-ba8ea06fb5dd\") " pod="metallb-system/speaker-8244f" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.204939 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7w8v9\" (UniqueName: \"kubernetes.io/projected/ea79aacc-f31f-43cd-a0a4-151d9a4703e4-kube-api-access-7w8v9\") pod \"frr-k8s-webhook-server-7fcb986d4-rjsk6\" (UID: \"ea79aacc-f31f-43cd-a0a4-151d9a4703e4\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-rjsk6" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.204977 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ea79aacc-f31f-43cd-a0a4-151d9a4703e4-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-rjsk6\" (UID: \"ea79aacc-f31f-43cd-a0a4-151d9a4703e4\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-rjsk6" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.205000 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/f893c93f-566c-4c25-8e2b-48c3d73ca5fd-frr-conf\") pod \"frr-k8s-jhd6k\" (UID: \"f893c93f-566c-4c25-8e2b-48c3d73ca5fd\") " pod="metallb-system/frr-k8s-jhd6k" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.205014 4576 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/f893c93f-566c-4c25-8e2b-48c3d73ca5fd-reloader\") pod \"frr-k8s-jhd6k\" (UID: \"f893c93f-566c-4c25-8e2b-48c3d73ca5fd\") " pod="metallb-system/frr-k8s-jhd6k" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.205034 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/7dc75631-a77b-4157-9abb-ba8ea06fb5dd-metallb-excludel2\") pod \"speaker-8244f\" (UID: \"7dc75631-a77b-4157-9abb-ba8ea06fb5dd\") " pod="metallb-system/speaker-8244f" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.205054 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7dc75631-a77b-4157-9abb-ba8ea06fb5dd-metrics-certs\") pod \"speaker-8244f\" (UID: \"7dc75631-a77b-4157-9abb-ba8ea06fb5dd\") " pod="metallb-system/speaker-8244f" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.205072 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/7dc75631-a77b-4157-9abb-ba8ea06fb5dd-memberlist\") pod \"speaker-8244f\" (UID: \"7dc75631-a77b-4157-9abb-ba8ea06fb5dd\") " pod="metallb-system/speaker-8244f" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.205100 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f893c93f-566c-4c25-8e2b-48c3d73ca5fd-metrics-certs\") pod \"frr-k8s-jhd6k\" (UID: \"f893c93f-566c-4c25-8e2b-48c3d73ca5fd\") " pod="metallb-system/frr-k8s-jhd6k" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.205427 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/f893c93f-566c-4c25-8e2b-48c3d73ca5fd-metrics\") pod \"frr-k8s-jhd6k\" (UID: \"f893c93f-566c-4c25-8e2b-48c3d73ca5fd\") " pod="metallb-system/frr-k8s-jhd6k" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.205507 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/f893c93f-566c-4c25-8e2b-48c3d73ca5fd-frr-conf\") pod \"frr-k8s-jhd6k\" (UID: \"f893c93f-566c-4c25-8e2b-48c3d73ca5fd\") " pod="metallb-system/frr-k8s-jhd6k" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.205776 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/f893c93f-566c-4c25-8e2b-48c3d73ca5fd-reloader\") pod \"frr-k8s-jhd6k\" (UID: \"f893c93f-566c-4c25-8e2b-48c3d73ca5fd\") " pod="metallb-system/frr-k8s-jhd6k" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.205804 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/f893c93f-566c-4c25-8e2b-48c3d73ca5fd-frr-startup\") pod \"frr-k8s-jhd6k\" (UID: \"f893c93f-566c-4c25-8e2b-48c3d73ca5fd\") " pod="metallb-system/frr-k8s-jhd6k" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.205911 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/f893c93f-566c-4c25-8e2b-48c3d73ca5fd-frr-sockets\") pod \"frr-k8s-jhd6k\" (UID: \"f893c93f-566c-4c25-8e2b-48c3d73ca5fd\") " pod="metallb-system/frr-k8s-jhd6k" Dec 03 09:06:03 crc kubenswrapper[4576]: E1203 09:06:03.206171 4576 secret.go:188] Couldn't get 
secret metallb-system/frr-k8s-webhook-server-cert: secret "frr-k8s-webhook-server-cert" not found Dec 03 09:06:03 crc kubenswrapper[4576]: E1203 09:06:03.206412 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ea79aacc-f31f-43cd-a0a4-151d9a4703e4-cert podName:ea79aacc-f31f-43cd-a0a4-151d9a4703e4 nodeName:}" failed. No retries permitted until 2025-12-03 09:06:03.706354719 +0000 UTC m=+1571.092331703 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/ea79aacc-f31f-43cd-a0a4-151d9a4703e4-cert") pod "frr-k8s-webhook-server-7fcb986d4-rjsk6" (UID: "ea79aacc-f31f-43cd-a0a4-151d9a4703e4") : secret "frr-k8s-webhook-server-cert" not found Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.228668 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f893c93f-566c-4c25-8e2b-48c3d73ca5fd-metrics-certs\") pod \"frr-k8s-jhd6k\" (UID: \"f893c93f-566c-4c25-8e2b-48c3d73ca5fd\") " pod="metallb-system/frr-k8s-jhd6k" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.230314 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cq6rx\" (UniqueName: \"kubernetes.io/projected/f893c93f-566c-4c25-8e2b-48c3d73ca5fd-kube-api-access-cq6rx\") pod \"frr-k8s-jhd6k\" (UID: \"f893c93f-566c-4c25-8e2b-48c3d73ca5fd\") " pod="metallb-system/frr-k8s-jhd6k" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.233290 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7w8v9\" (UniqueName: \"kubernetes.io/projected/ea79aacc-f31f-43cd-a0a4-151d9a4703e4-kube-api-access-7w8v9\") pod \"frr-k8s-webhook-server-7fcb986d4-rjsk6\" (UID: \"ea79aacc-f31f-43cd-a0a4-151d9a4703e4\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-rjsk6" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.306307 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/7dc75631-a77b-4157-9abb-ba8ea06fb5dd-metallb-excludel2\") pod \"speaker-8244f\" (UID: \"7dc75631-a77b-4157-9abb-ba8ea06fb5dd\") " pod="metallb-system/speaker-8244f" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.306359 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7dc75631-a77b-4157-9abb-ba8ea06fb5dd-metrics-certs\") pod \"speaker-8244f\" (UID: \"7dc75631-a77b-4157-9abb-ba8ea06fb5dd\") " pod="metallb-system/speaker-8244f" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.306385 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/7dc75631-a77b-4157-9abb-ba8ea06fb5dd-memberlist\") pod \"speaker-8244f\" (UID: \"7dc75631-a77b-4157-9abb-ba8ea06fb5dd\") " pod="metallb-system/speaker-8244f" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.306409 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2wgms\" (UniqueName: \"kubernetes.io/projected/35fce6e6-ceb3-4844-ad2e-fb7454c2e425-kube-api-access-2wgms\") pod \"controller-f8648f98b-9fpwm\" (UID: \"35fce6e6-ceb3-4844-ad2e-fb7454c2e425\") " pod="metallb-system/controller-f8648f98b-9fpwm" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.306464 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"metrics-certs\" (UniqueName: \"kubernetes.io/secret/35fce6e6-ceb3-4844-ad2e-fb7454c2e425-metrics-certs\") pod \"controller-f8648f98b-9fpwm\" (UID: \"35fce6e6-ceb3-4844-ad2e-fb7454c2e425\") " pod="metallb-system/controller-f8648f98b-9fpwm" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.306484 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8sxt\" (UniqueName: \"kubernetes.io/projected/7dc75631-a77b-4157-9abb-ba8ea06fb5dd-kube-api-access-m8sxt\") pod \"speaker-8244f\" (UID: \"7dc75631-a77b-4157-9abb-ba8ea06fb5dd\") " pod="metallb-system/speaker-8244f" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.306501 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/35fce6e6-ceb3-4844-ad2e-fb7454c2e425-cert\") pod \"controller-f8648f98b-9fpwm\" (UID: \"35fce6e6-ceb3-4844-ad2e-fb7454c2e425\") " pod="metallb-system/controller-f8648f98b-9fpwm" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.307217 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/7dc75631-a77b-4157-9abb-ba8ea06fb5dd-metallb-excludel2\") pod \"speaker-8244f\" (UID: \"7dc75631-a77b-4157-9abb-ba8ea06fb5dd\") " pod="metallb-system/speaker-8244f" Dec 03 09:06:03 crc kubenswrapper[4576]: E1203 09:06:03.307314 4576 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found Dec 03 09:06:03 crc kubenswrapper[4576]: E1203 09:06:03.307359 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7dc75631-a77b-4157-9abb-ba8ea06fb5dd-metrics-certs podName:7dc75631-a77b-4157-9abb-ba8ea06fb5dd nodeName:}" failed. No retries permitted until 2025-12-03 09:06:03.807345441 +0000 UTC m=+1571.193322425 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7dc75631-a77b-4157-9abb-ba8ea06fb5dd-metrics-certs") pod "speaker-8244f" (UID: "7dc75631-a77b-4157-9abb-ba8ea06fb5dd") : secret "speaker-certs-secret" not found Dec 03 09:06:03 crc kubenswrapper[4576]: E1203 09:06:03.307399 4576 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 03 09:06:03 crc kubenswrapper[4576]: E1203 09:06:03.307439 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7dc75631-a77b-4157-9abb-ba8ea06fb5dd-memberlist podName:7dc75631-a77b-4157-9abb-ba8ea06fb5dd nodeName:}" failed. No retries permitted until 2025-12-03 09:06:03.807433373 +0000 UTC m=+1571.193410357 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/7dc75631-a77b-4157-9abb-ba8ea06fb5dd-memberlist") pod "speaker-8244f" (UID: "7dc75631-a77b-4157-9abb-ba8ea06fb5dd") : secret "metallb-memberlist" not found Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.325605 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8sxt\" (UniqueName: \"kubernetes.io/projected/7dc75631-a77b-4157-9abb-ba8ea06fb5dd-kube-api-access-m8sxt\") pod \"speaker-8244f\" (UID: \"7dc75631-a77b-4157-9abb-ba8ea06fb5dd\") " pod="metallb-system/speaker-8244f" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.338667 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-jhd6k" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.408330 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/35fce6e6-ceb3-4844-ad2e-fb7454c2e425-metrics-certs\") pod \"controller-f8648f98b-9fpwm\" (UID: \"35fce6e6-ceb3-4844-ad2e-fb7454c2e425\") " pod="metallb-system/controller-f8648f98b-9fpwm" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.408564 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/35fce6e6-ceb3-4844-ad2e-fb7454c2e425-cert\") pod \"controller-f8648f98b-9fpwm\" (UID: \"35fce6e6-ceb3-4844-ad2e-fb7454c2e425\") " pod="metallb-system/controller-f8648f98b-9fpwm" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.408735 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2wgms\" (UniqueName: \"kubernetes.io/projected/35fce6e6-ceb3-4844-ad2e-fb7454c2e425-kube-api-access-2wgms\") pod \"controller-f8648f98b-9fpwm\" (UID: \"35fce6e6-ceb3-4844-ad2e-fb7454c2e425\") " pod="metallb-system/controller-f8648f98b-9fpwm" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.411797 4576 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.414204 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/35fce6e6-ceb3-4844-ad2e-fb7454c2e425-metrics-certs\") pod \"controller-f8648f98b-9fpwm\" (UID: \"35fce6e6-ceb3-4844-ad2e-fb7454c2e425\") " pod="metallb-system/controller-f8648f98b-9fpwm" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.423141 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/35fce6e6-ceb3-4844-ad2e-fb7454c2e425-cert\") pod \"controller-f8648f98b-9fpwm\" (UID: \"35fce6e6-ceb3-4844-ad2e-fb7454c2e425\") " pod="metallb-system/controller-f8648f98b-9fpwm" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.433820 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2wgms\" (UniqueName: \"kubernetes.io/projected/35fce6e6-ceb3-4844-ad2e-fb7454c2e425-kube-api-access-2wgms\") pod \"controller-f8648f98b-9fpwm\" (UID: \"35fce6e6-ceb3-4844-ad2e-fb7454c2e425\") " pod="metallb-system/controller-f8648f98b-9fpwm" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.464605 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-9fpwm" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.713949 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ea79aacc-f31f-43cd-a0a4-151d9a4703e4-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-rjsk6\" (UID: \"ea79aacc-f31f-43cd-a0a4-151d9a4703e4\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-rjsk6" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.719468 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ea79aacc-f31f-43cd-a0a4-151d9a4703e4-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-rjsk6\" (UID: \"ea79aacc-f31f-43cd-a0a4-151d9a4703e4\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-rjsk6" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.735305 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-9fpwm"] Dec 03 09:06:03 crc kubenswrapper[4576]: W1203 09:06:03.739029 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod35fce6e6_ceb3_4844_ad2e_fb7454c2e425.slice/crio-e90580ab457df0194e3a0c70015ca9cc504149fd03cb052d27c59edafd3ed225 WatchSource:0}: Error finding container e90580ab457df0194e3a0c70015ca9cc504149fd03cb052d27c59edafd3ed225: Status 404 returned error can't find the container with id e90580ab457df0194e3a0c70015ca9cc504149fd03cb052d27c59edafd3ed225 Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.816022 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7dc75631-a77b-4157-9abb-ba8ea06fb5dd-metrics-certs\") pod \"speaker-8244f\" (UID: \"7dc75631-a77b-4157-9abb-ba8ea06fb5dd\") " pod="metallb-system/speaker-8244f" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.816076 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/7dc75631-a77b-4157-9abb-ba8ea06fb5dd-memberlist\") pod \"speaker-8244f\" (UID: \"7dc75631-a77b-4157-9abb-ba8ea06fb5dd\") " pod="metallb-system/speaker-8244f" Dec 03 09:06:03 crc kubenswrapper[4576]: E1203 09:06:03.816191 4576 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 03 09:06:03 crc kubenswrapper[4576]: E1203 09:06:03.816244 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7dc75631-a77b-4157-9abb-ba8ea06fb5dd-memberlist podName:7dc75631-a77b-4157-9abb-ba8ea06fb5dd nodeName:}" failed. No retries permitted until 2025-12-03 09:06:04.816226711 +0000 UTC m=+1572.202203695 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/7dc75631-a77b-4157-9abb-ba8ea06fb5dd-memberlist") pod "speaker-8244f" (UID: "7dc75631-a77b-4157-9abb-ba8ea06fb5dd") : secret "metallb-memberlist" not found Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.822185 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7dc75631-a77b-4157-9abb-ba8ea06fb5dd-metrics-certs\") pod \"speaker-8244f\" (UID: \"7dc75631-a77b-4157-9abb-ba8ea06fb5dd\") " pod="metallb-system/speaker-8244f" Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.911440 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jhd6k" event={"ID":"f893c93f-566c-4c25-8e2b-48c3d73ca5fd","Type":"ContainerStarted","Data":"cc0e960469d65dce05c71ee0420e2384013d07a20590181c0ee2c0d6ac6e76a6"} Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.913761 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-9fpwm" event={"ID":"35fce6e6-ceb3-4844-ad2e-fb7454c2e425","Type":"ContainerStarted","Data":"d25ce5749f4374acdbe155ddd9a63a20373b9076aa4d57fba085ed90a375235a"} Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.913812 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-9fpwm" event={"ID":"35fce6e6-ceb3-4844-ad2e-fb7454c2e425","Type":"ContainerStarted","Data":"e90580ab457df0194e3a0c70015ca9cc504149fd03cb052d27c59edafd3ed225"} Dec 03 09:06:03 crc kubenswrapper[4576]: I1203 09:06:03.961791 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-rjsk6" Dec 03 09:06:04 crc kubenswrapper[4576]: I1203 09:06:04.176216 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-rjsk6"] Dec 03 09:06:04 crc kubenswrapper[4576]: I1203 09:06:04.830517 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/7dc75631-a77b-4157-9abb-ba8ea06fb5dd-memberlist\") pod \"speaker-8244f\" (UID: \"7dc75631-a77b-4157-9abb-ba8ea06fb5dd\") " pod="metallb-system/speaker-8244f" Dec 03 09:06:04 crc kubenswrapper[4576]: I1203 09:06:04.843051 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/7dc75631-a77b-4157-9abb-ba8ea06fb5dd-memberlist\") pod \"speaker-8244f\" (UID: \"7dc75631-a77b-4157-9abb-ba8ea06fb5dd\") " pod="metallb-system/speaker-8244f" Dec 03 09:06:04 crc kubenswrapper[4576]: I1203 09:06:04.922201 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-rjsk6" event={"ID":"ea79aacc-f31f-43cd-a0a4-151d9a4703e4","Type":"ContainerStarted","Data":"481248fd6bc94f7f0c1e5cbb0b8cfb7d563c7c227e7d2d970e036d96559e2f62"} Dec 03 09:06:04 crc kubenswrapper[4576]: I1203 09:06:04.924692 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-9fpwm" event={"ID":"35fce6e6-ceb3-4844-ad2e-fb7454c2e425","Type":"ContainerStarted","Data":"e7aa482ba1065f2bbdba77cf2e1cfa135c3cee27f597816d0543a40b1a892e42"} Dec 03 09:06:04 crc kubenswrapper[4576]: I1203 09:06:04.924825 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-9fpwm" Dec 03 09:06:04 crc kubenswrapper[4576]: I1203 09:06:04.935001 4576 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="metallb-system/speaker-8244f" Dec 03 09:06:04 crc kubenswrapper[4576]: W1203 09:06:04.959066 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7dc75631_a77b_4157_9abb_ba8ea06fb5dd.slice/crio-e10a497b5f838c54d1aa13387b03d24671f73f8838c785ea1b0a9580b0eccc3a WatchSource:0}: Error finding container e10a497b5f838c54d1aa13387b03d24671f73f8838c785ea1b0a9580b0eccc3a: Status 404 returned error can't find the container with id e10a497b5f838c54d1aa13387b03d24671f73f8838c785ea1b0a9580b0eccc3a Dec 03 09:06:04 crc kubenswrapper[4576]: I1203 09:06:04.960137 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-9fpwm" podStartSLOduration=1.960062621 podStartE2EDuration="1.960062621s" podCreationTimestamp="2025-12-03 09:06:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:06:04.958520229 +0000 UTC m=+1572.344497213" watchObservedRunningTime="2025-12-03 09:06:04.960062621 +0000 UTC m=+1572.346039605" Dec 03 09:06:05 crc kubenswrapper[4576]: I1203 09:06:05.943183 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-8244f" event={"ID":"7dc75631-a77b-4157-9abb-ba8ea06fb5dd","Type":"ContainerStarted","Data":"1cb1e5e1ee742143def3b3e5efe6a8fd2e00f4eddf038165997581e421813459"} Dec 03 09:06:05 crc kubenswrapper[4576]: I1203 09:06:05.943757 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-8244f" event={"ID":"7dc75631-a77b-4157-9abb-ba8ea06fb5dd","Type":"ContainerStarted","Data":"5fec436f974e8c54b3319e3198ce5ba8f0c8dba515273ef50023f9c29700eab1"} Dec 03 09:06:05 crc kubenswrapper[4576]: I1203 09:06:05.943773 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-8244f" event={"ID":"7dc75631-a77b-4157-9abb-ba8ea06fb5dd","Type":"ContainerStarted","Data":"e10a497b5f838c54d1aa13387b03d24671f73f8838c785ea1b0a9580b0eccc3a"} Dec 03 09:06:05 crc kubenswrapper[4576]: I1203 09:06:05.944541 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-8244f" Dec 03 09:06:06 crc kubenswrapper[4576]: I1203 09:06:06.010326 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-8244f" podStartSLOduration=3.010307799 podStartE2EDuration="3.010307799s" podCreationTimestamp="2025-12-03 09:06:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:06:06.006615209 +0000 UTC m=+1573.392592193" watchObservedRunningTime="2025-12-03 09:06:06.010307799 +0000 UTC m=+1573.396284783" Dec 03 09:06:09 crc kubenswrapper[4576]: I1203 09:06:09.683701 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 09:06:09 crc kubenswrapper[4576]: I1203 09:06:09.684003 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: 
connection refused" Dec 03 09:06:09 crc kubenswrapper[4576]: I1203 09:06:09.694364 4576 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" Dec 03 09:06:09 crc kubenswrapper[4576]: I1203 09:06:09.695090 4576 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"971057ff9d8dd948ea2baec3fb0fe13004ef1bdcb132acd7f101f25e8ebc2c91"} pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 09:06:09 crc kubenswrapper[4576]: I1203 09:06:09.695161 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" containerID="cri-o://971057ff9d8dd948ea2baec3fb0fe13004ef1bdcb132acd7f101f25e8ebc2c91" gracePeriod=600 Dec 03 09:06:09 crc kubenswrapper[4576]: I1203 09:06:09.975685 4576 generic.go:334] "Generic (PLEG): container finished" podID="60b1bede-26e9-4b5d-b450-9866da685693" containerID="971057ff9d8dd948ea2baec3fb0fe13004ef1bdcb132acd7f101f25e8ebc2c91" exitCode=0 Dec 03 09:06:09 crc kubenswrapper[4576]: I1203 09:06:09.975739 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" event={"ID":"60b1bede-26e9-4b5d-b450-9866da685693","Type":"ContainerDied","Data":"971057ff9d8dd948ea2baec3fb0fe13004ef1bdcb132acd7f101f25e8ebc2c91"} Dec 03 09:06:09 crc kubenswrapper[4576]: I1203 09:06:09.975783 4576 scope.go:117] "RemoveContainer" containerID="d22e42ceade7a96f65671f9060e7f04e4a04dc671101869aefa6c525f8d96e1a" Dec 03 09:06:13 crc kubenswrapper[4576]: I1203 09:06:13.470191 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-f8648f98b-9fpwm" Dec 03 09:06:13 crc kubenswrapper[4576]: E1203 09:06:13.806802 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:06:14 crc kubenswrapper[4576]: I1203 09:06:14.001167 4576 scope.go:117] "RemoveContainer" containerID="971057ff9d8dd948ea2baec3fb0fe13004ef1bdcb132acd7f101f25e8ebc2c91" Dec 03 09:06:14 crc kubenswrapper[4576]: E1203 09:06:14.001599 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:06:15 crc kubenswrapper[4576]: I1203 09:06:15.009484 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-rjsk6" event={"ID":"ea79aacc-f31f-43cd-a0a4-151d9a4703e4","Type":"ContainerStarted","Data":"801522bb8d9abd5fab9bc3dcc84f78622b613164f183fccc51a2b260738a40ce"} Dec 03 09:06:15 crc kubenswrapper[4576]: I1203 
09:06:15.009927 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-rjsk6" Dec 03 09:06:15 crc kubenswrapper[4576]: I1203 09:06:15.011592 4576 generic.go:334] "Generic (PLEG): container finished" podID="f893c93f-566c-4c25-8e2b-48c3d73ca5fd" containerID="0cbc17af0cf29985f9cc08e64b78a6c5f763a5d267e2e8a30a0ffd2a5328efe9" exitCode=0 Dec 03 09:06:15 crc kubenswrapper[4576]: I1203 09:06:15.011642 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jhd6k" event={"ID":"f893c93f-566c-4c25-8e2b-48c3d73ca5fd","Type":"ContainerDied","Data":"0cbc17af0cf29985f9cc08e64b78a6c5f763a5d267e2e8a30a0ffd2a5328efe9"} Dec 03 09:06:15 crc kubenswrapper[4576]: I1203 09:06:15.030465 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-rjsk6" podStartSLOduration=3.234089745 podStartE2EDuration="13.030440067s" podCreationTimestamp="2025-12-03 09:06:02 +0000 UTC" firstStartedPulling="2025-12-03 09:06:04.208760952 +0000 UTC m=+1571.594737936" lastFinishedPulling="2025-12-03 09:06:14.005111274 +0000 UTC m=+1581.391088258" observedRunningTime="2025-12-03 09:06:15.025101462 +0000 UTC m=+1582.411078486" watchObservedRunningTime="2025-12-03 09:06:15.030440067 +0000 UTC m=+1582.416417051" Dec 03 09:06:16 crc kubenswrapper[4576]: I1203 09:06:16.019861 4576 generic.go:334] "Generic (PLEG): container finished" podID="f893c93f-566c-4c25-8e2b-48c3d73ca5fd" containerID="70fc81e294496395c04b9874fee05985533253016ff9d20f3153ffca8919b176" exitCode=0 Dec 03 09:06:16 crc kubenswrapper[4576]: I1203 09:06:16.019927 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jhd6k" event={"ID":"f893c93f-566c-4c25-8e2b-48c3d73ca5fd","Type":"ContainerDied","Data":"70fc81e294496395c04b9874fee05985533253016ff9d20f3153ffca8919b176"} Dec 03 09:06:17 crc kubenswrapper[4576]: I1203 09:06:17.027387 4576 generic.go:334] "Generic (PLEG): container finished" podID="f893c93f-566c-4c25-8e2b-48c3d73ca5fd" containerID="1bc6e23f51445d4ab66f0134a36c95f039c146636c0812cfdfb2686091133b5c" exitCode=0 Dec 03 09:06:17 crc kubenswrapper[4576]: I1203 09:06:17.027436 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jhd6k" event={"ID":"f893c93f-566c-4c25-8e2b-48c3d73ca5fd","Type":"ContainerDied","Data":"1bc6e23f51445d4ab66f0134a36c95f039c146636c0812cfdfb2686091133b5c"} Dec 03 09:06:19 crc kubenswrapper[4576]: I1203 09:06:19.046475 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jhd6k" event={"ID":"f893c93f-566c-4c25-8e2b-48c3d73ca5fd","Type":"ContainerStarted","Data":"78b2b46c03e556afa86b4e633001388eccb67fa3383e58685b7fa95022deb003"} Dec 03 09:06:19 crc kubenswrapper[4576]: I1203 09:06:19.046782 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jhd6k" event={"ID":"f893c93f-566c-4c25-8e2b-48c3d73ca5fd","Type":"ContainerStarted","Data":"54cd1b372700dc748e6be553cf5aa77c036598074e6905bf49f4c8cb68f32f85"} Dec 03 09:06:19 crc kubenswrapper[4576]: I1203 09:06:19.046793 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jhd6k" event={"ID":"f893c93f-566c-4c25-8e2b-48c3d73ca5fd","Type":"ContainerStarted","Data":"647bf001668c606c8a079d9c24ba2ddb90a6bf2b22bb64a7f4a2c84f8c9252e3"} Dec 03 09:06:19 crc kubenswrapper[4576]: I1203 09:06:19.046802 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jhd6k" 
event={"ID":"f893c93f-566c-4c25-8e2b-48c3d73ca5fd","Type":"ContainerStarted","Data":"e6fa57ee2aac2aca921313e20a59f7ee8f5618ae55cad00438fb583b3d884400"} Dec 03 09:06:19 crc kubenswrapper[4576]: I1203 09:06:19.046810 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jhd6k" event={"ID":"f893c93f-566c-4c25-8e2b-48c3d73ca5fd","Type":"ContainerStarted","Data":"22b65dbb665a6b4fd396543edef500fef51feff9d878dcb31661931346163257"} Dec 03 09:06:20 crc kubenswrapper[4576]: I1203 09:06:20.061086 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jhd6k" event={"ID":"f893c93f-566c-4c25-8e2b-48c3d73ca5fd","Type":"ContainerStarted","Data":"55a66c906d680e04269b7a8be5e734e167644661a7becf6027596577b8aecb1a"} Dec 03 09:06:20 crc kubenswrapper[4576]: I1203 09:06:20.061751 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-jhd6k" Dec 03 09:06:20 crc kubenswrapper[4576]: I1203 09:06:20.101080 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-jhd6k" podStartSLOduration=7.610326169 podStartE2EDuration="18.101056258s" podCreationTimestamp="2025-12-03 09:06:02 +0000 UTC" firstStartedPulling="2025-12-03 09:06:03.494439294 +0000 UTC m=+1570.880416278" lastFinishedPulling="2025-12-03 09:06:13.985169383 +0000 UTC m=+1581.371146367" observedRunningTime="2025-12-03 09:06:20.098243622 +0000 UTC m=+1587.484220616" watchObservedRunningTime="2025-12-03 09:06:20.101056258 +0000 UTC m=+1587.487033242" Dec 03 09:06:23 crc kubenswrapper[4576]: I1203 09:06:23.339897 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-jhd6k" Dec 03 09:06:23 crc kubenswrapper[4576]: I1203 09:06:23.385082 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-jhd6k" Dec 03 09:06:24 crc kubenswrapper[4576]: I1203 09:06:24.943625 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-8244f" Dec 03 09:06:25 crc kubenswrapper[4576]: I1203 09:06:25.677792 4576 scope.go:117] "RemoveContainer" containerID="971057ff9d8dd948ea2baec3fb0fe13004ef1bdcb132acd7f101f25e8ebc2c91" Dec 03 09:06:25 crc kubenswrapper[4576]: E1203 09:06:25.678296 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:06:28 crc kubenswrapper[4576]: I1203 09:06:28.038188 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-ll4rt"] Dec 03 09:06:28 crc kubenswrapper[4576]: I1203 09:06:28.040366 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-ll4rt" Dec 03 09:06:28 crc kubenswrapper[4576]: I1203 09:06:28.048719 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-r24pv" Dec 03 09:06:28 crc kubenswrapper[4576]: I1203 09:06:28.049203 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 03 09:06:28 crc kubenswrapper[4576]: I1203 09:06:28.050045 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 03 09:06:28 crc kubenswrapper[4576]: I1203 09:06:28.096777 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bs7zl\" (UniqueName: \"kubernetes.io/projected/46bccaad-e684-48f5-9f50-edcb65b4165e-kube-api-access-bs7zl\") pod \"openstack-operator-index-ll4rt\" (UID: \"46bccaad-e684-48f5-9f50-edcb65b4165e\") " pod="openstack-operators/openstack-operator-index-ll4rt" Dec 03 09:06:28 crc kubenswrapper[4576]: I1203 09:06:28.119166 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-ll4rt"] Dec 03 09:06:28 crc kubenswrapper[4576]: I1203 09:06:28.197550 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bs7zl\" (UniqueName: \"kubernetes.io/projected/46bccaad-e684-48f5-9f50-edcb65b4165e-kube-api-access-bs7zl\") pod \"openstack-operator-index-ll4rt\" (UID: \"46bccaad-e684-48f5-9f50-edcb65b4165e\") " pod="openstack-operators/openstack-operator-index-ll4rt" Dec 03 09:06:28 crc kubenswrapper[4576]: I1203 09:06:28.220153 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bs7zl\" (UniqueName: \"kubernetes.io/projected/46bccaad-e684-48f5-9f50-edcb65b4165e-kube-api-access-bs7zl\") pod \"openstack-operator-index-ll4rt\" (UID: \"46bccaad-e684-48f5-9f50-edcb65b4165e\") " pod="openstack-operators/openstack-operator-index-ll4rt" Dec 03 09:06:28 crc kubenswrapper[4576]: I1203 09:06:28.369896 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-ll4rt" Dec 03 09:06:28 crc kubenswrapper[4576]: I1203 09:06:28.967141 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-ll4rt"] Dec 03 09:06:28 crc kubenswrapper[4576]: W1203 09:06:28.973837 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod46bccaad_e684_48f5_9f50_edcb65b4165e.slice/crio-42bd07633d876465cb639d299f82db56db9b0c4ec0baeb9447c1b48ab99848fb WatchSource:0}: Error finding container 42bd07633d876465cb639d299f82db56db9b0c4ec0baeb9447c1b48ab99848fb: Status 404 returned error can't find the container with id 42bd07633d876465cb639d299f82db56db9b0c4ec0baeb9447c1b48ab99848fb Dec 03 09:06:29 crc kubenswrapper[4576]: I1203 09:06:29.130379 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-ll4rt" event={"ID":"46bccaad-e684-48f5-9f50-edcb65b4165e","Type":"ContainerStarted","Data":"42bd07633d876465cb639d299f82db56db9b0c4ec0baeb9447c1b48ab99848fb"} Dec 03 09:06:31 crc kubenswrapper[4576]: I1203 09:06:31.180204 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-ll4rt"] Dec 03 09:06:31 crc kubenswrapper[4576]: I1203 09:06:31.794934 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-rwqb7"] Dec 03 09:06:31 crc kubenswrapper[4576]: I1203 09:06:31.795624 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-rwqb7"] Dec 03 09:06:31 crc kubenswrapper[4576]: I1203 09:06:31.795710 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-rwqb7" Dec 03 09:06:31 crc kubenswrapper[4576]: I1203 09:06:31.916452 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sm6p9\" (UniqueName: \"kubernetes.io/projected/335bbabf-ec3b-484b-8081-08e919ec8dcd-kube-api-access-sm6p9\") pod \"openstack-operator-index-rwqb7\" (UID: \"335bbabf-ec3b-484b-8081-08e919ec8dcd\") " pod="openstack-operators/openstack-operator-index-rwqb7" Dec 03 09:06:32 crc kubenswrapper[4576]: I1203 09:06:32.018241 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sm6p9\" (UniqueName: \"kubernetes.io/projected/335bbabf-ec3b-484b-8081-08e919ec8dcd-kube-api-access-sm6p9\") pod \"openstack-operator-index-rwqb7\" (UID: \"335bbabf-ec3b-484b-8081-08e919ec8dcd\") " pod="openstack-operators/openstack-operator-index-rwqb7" Dec 03 09:06:32 crc kubenswrapper[4576]: I1203 09:06:32.042331 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sm6p9\" (UniqueName: \"kubernetes.io/projected/335bbabf-ec3b-484b-8081-08e919ec8dcd-kube-api-access-sm6p9\") pod \"openstack-operator-index-rwqb7\" (UID: \"335bbabf-ec3b-484b-8081-08e919ec8dcd\") " pod="openstack-operators/openstack-operator-index-rwqb7" Dec 03 09:06:32 crc kubenswrapper[4576]: I1203 09:06:32.125493 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-rwqb7" Dec 03 09:06:32 crc kubenswrapper[4576]: I1203 09:06:32.462368 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-rwqb7"] Dec 03 09:06:32 crc kubenswrapper[4576]: W1203 09:06:32.471101 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod335bbabf_ec3b_484b_8081_08e919ec8dcd.slice/crio-a168d418918409ae196d47768cf7558bdc1da4c1b39266d78a11cbda19f593a1 WatchSource:0}: Error finding container a168d418918409ae196d47768cf7558bdc1da4c1b39266d78a11cbda19f593a1: Status 404 returned error can't find the container with id a168d418918409ae196d47768cf7558bdc1da4c1b39266d78a11cbda19f593a1 Dec 03 09:06:33 crc kubenswrapper[4576]: I1203 09:06:33.157365 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-ll4rt" event={"ID":"46bccaad-e684-48f5-9f50-edcb65b4165e","Type":"ContainerStarted","Data":"346a991fa48b2dd495e5180e9181cbaa1bc6e83b1c208d9dd32f41049ed627ca"} Dec 03 09:06:33 crc kubenswrapper[4576]: I1203 09:06:33.157685 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-ll4rt" podUID="46bccaad-e684-48f5-9f50-edcb65b4165e" containerName="registry-server" containerID="cri-o://346a991fa48b2dd495e5180e9181cbaa1bc6e83b1c208d9dd32f41049ed627ca" gracePeriod=2 Dec 03 09:06:33 crc kubenswrapper[4576]: I1203 09:06:33.163099 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-rwqb7" event={"ID":"335bbabf-ec3b-484b-8081-08e919ec8dcd","Type":"ContainerStarted","Data":"2c0a004619e8299998b4b16b8c3c5f2ef20811954e78a79f355d2ea1ad69311e"} Dec 03 09:06:33 crc kubenswrapper[4576]: I1203 09:06:33.163146 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-rwqb7" event={"ID":"335bbabf-ec3b-484b-8081-08e919ec8dcd","Type":"ContainerStarted","Data":"a168d418918409ae196d47768cf7558bdc1da4c1b39266d78a11cbda19f593a1"} Dec 03 09:06:33 crc kubenswrapper[4576]: I1203 09:06:33.183278 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-ll4rt" podStartSLOduration=3.084225935 podStartE2EDuration="6.183232009s" podCreationTimestamp="2025-12-03 09:06:27 +0000 UTC" firstStartedPulling="2025-12-03 09:06:28.976184533 +0000 UTC m=+1596.362161517" lastFinishedPulling="2025-12-03 09:06:32.075190607 +0000 UTC m=+1599.461167591" observedRunningTime="2025-12-03 09:06:33.179030145 +0000 UTC m=+1600.565007139" watchObservedRunningTime="2025-12-03 09:06:33.183232009 +0000 UTC m=+1600.569208993" Dec 03 09:06:33 crc kubenswrapper[4576]: I1203 09:06:33.202983 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-rwqb7" podStartSLOduration=2.155785125 podStartE2EDuration="2.202963192s" podCreationTimestamp="2025-12-03 09:06:31 +0000 UTC" firstStartedPulling="2025-12-03 09:06:32.475302323 +0000 UTC m=+1599.861279307" lastFinishedPulling="2025-12-03 09:06:32.52248039 +0000 UTC m=+1599.908457374" observedRunningTime="2025-12-03 09:06:33.201896593 +0000 UTC m=+1600.587873597" watchObservedRunningTime="2025-12-03 09:06:33.202963192 +0000 UTC m=+1600.588940176" Dec 03 09:06:33 crc kubenswrapper[4576]: I1203 09:06:33.343157 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="metallb-system/frr-k8s-jhd6k" Dec 03 09:06:33 crc kubenswrapper[4576]: I1203 09:06:33.508429 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-ll4rt" Dec 03 09:06:33 crc kubenswrapper[4576]: I1203 09:06:33.661358 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bs7zl\" (UniqueName: \"kubernetes.io/projected/46bccaad-e684-48f5-9f50-edcb65b4165e-kube-api-access-bs7zl\") pod \"46bccaad-e684-48f5-9f50-edcb65b4165e\" (UID: \"46bccaad-e684-48f5-9f50-edcb65b4165e\") " Dec 03 09:06:33 crc kubenswrapper[4576]: I1203 09:06:33.673853 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46bccaad-e684-48f5-9f50-edcb65b4165e-kube-api-access-bs7zl" (OuterVolumeSpecName: "kube-api-access-bs7zl") pod "46bccaad-e684-48f5-9f50-edcb65b4165e" (UID: "46bccaad-e684-48f5-9f50-edcb65b4165e"). InnerVolumeSpecName "kube-api-access-bs7zl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:06:33 crc kubenswrapper[4576]: I1203 09:06:33.764438 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bs7zl\" (UniqueName: \"kubernetes.io/projected/46bccaad-e684-48f5-9f50-edcb65b4165e-kube-api-access-bs7zl\") on node \"crc\" DevicePath \"\"" Dec 03 09:06:33 crc kubenswrapper[4576]: I1203 09:06:33.967171 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-rjsk6" Dec 03 09:06:34 crc kubenswrapper[4576]: I1203 09:06:34.193357 4576 generic.go:334] "Generic (PLEG): container finished" podID="46bccaad-e684-48f5-9f50-edcb65b4165e" containerID="346a991fa48b2dd495e5180e9181cbaa1bc6e83b1c208d9dd32f41049ed627ca" exitCode=0 Dec 03 09:06:34 crc kubenswrapper[4576]: I1203 09:06:34.193644 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-ll4rt" event={"ID":"46bccaad-e684-48f5-9f50-edcb65b4165e","Type":"ContainerDied","Data":"346a991fa48b2dd495e5180e9181cbaa1bc6e83b1c208d9dd32f41049ed627ca"} Dec 03 09:06:34 crc kubenswrapper[4576]: I1203 09:06:34.193786 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-ll4rt" event={"ID":"46bccaad-e684-48f5-9f50-edcb65b4165e","Type":"ContainerDied","Data":"42bd07633d876465cb639d299f82db56db9b0c4ec0baeb9447c1b48ab99848fb"} Dec 03 09:06:34 crc kubenswrapper[4576]: I1203 09:06:34.193795 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-ll4rt" Dec 03 09:06:34 crc kubenswrapper[4576]: I1203 09:06:34.193834 4576 scope.go:117] "RemoveContainer" containerID="346a991fa48b2dd495e5180e9181cbaa1bc6e83b1c208d9dd32f41049ed627ca" Dec 03 09:06:34 crc kubenswrapper[4576]: I1203 09:06:34.217460 4576 scope.go:117] "RemoveContainer" containerID="346a991fa48b2dd495e5180e9181cbaa1bc6e83b1c208d9dd32f41049ed627ca" Dec 03 09:06:34 crc kubenswrapper[4576]: E1203 09:06:34.218430 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"346a991fa48b2dd495e5180e9181cbaa1bc6e83b1c208d9dd32f41049ed627ca\": container with ID starting with 346a991fa48b2dd495e5180e9181cbaa1bc6e83b1c208d9dd32f41049ed627ca not found: ID does not exist" containerID="346a991fa48b2dd495e5180e9181cbaa1bc6e83b1c208d9dd32f41049ed627ca" Dec 03 09:06:34 crc kubenswrapper[4576]: I1203 09:06:34.218471 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"346a991fa48b2dd495e5180e9181cbaa1bc6e83b1c208d9dd32f41049ed627ca"} err="failed to get container status \"346a991fa48b2dd495e5180e9181cbaa1bc6e83b1c208d9dd32f41049ed627ca\": rpc error: code = NotFound desc = could not find container \"346a991fa48b2dd495e5180e9181cbaa1bc6e83b1c208d9dd32f41049ed627ca\": container with ID starting with 346a991fa48b2dd495e5180e9181cbaa1bc6e83b1c208d9dd32f41049ed627ca not found: ID does not exist" Dec 03 09:06:34 crc kubenswrapper[4576]: I1203 09:06:34.220070 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-ll4rt"] Dec 03 09:06:34 crc kubenswrapper[4576]: I1203 09:06:34.224928 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-ll4rt"] Dec 03 09:06:35 crc kubenswrapper[4576]: I1203 09:06:35.688519 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46bccaad-e684-48f5-9f50-edcb65b4165e" path="/var/lib/kubelet/pods/46bccaad-e684-48f5-9f50-edcb65b4165e/volumes" Dec 03 09:06:37 crc kubenswrapper[4576]: I1203 09:06:37.677778 4576 scope.go:117] "RemoveContainer" containerID="971057ff9d8dd948ea2baec3fb0fe13004ef1bdcb132acd7f101f25e8ebc2c91" Dec 03 09:06:37 crc kubenswrapper[4576]: E1203 09:06:37.678297 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:06:42 crc kubenswrapper[4576]: I1203 09:06:42.127510 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-rwqb7" Dec 03 09:06:42 crc kubenswrapper[4576]: I1203 09:06:42.127892 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-rwqb7" Dec 03 09:06:42 crc kubenswrapper[4576]: I1203 09:06:42.155968 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-rwqb7" Dec 03 09:06:42 crc kubenswrapper[4576]: I1203 09:06:42.341239 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-rwqb7" Dec 03 
09:06:50 crc kubenswrapper[4576]: I1203 09:06:50.025730 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt"] Dec 03 09:06:50 crc kubenswrapper[4576]: E1203 09:06:50.027335 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46bccaad-e684-48f5-9f50-edcb65b4165e" containerName="registry-server" Dec 03 09:06:50 crc kubenswrapper[4576]: I1203 09:06:50.027351 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="46bccaad-e684-48f5-9f50-edcb65b4165e" containerName="registry-server" Dec 03 09:06:50 crc kubenswrapper[4576]: I1203 09:06:50.027583 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="46bccaad-e684-48f5-9f50-edcb65b4165e" containerName="registry-server" Dec 03 09:06:50 crc kubenswrapper[4576]: I1203 09:06:50.029181 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt" Dec 03 09:06:50 crc kubenswrapper[4576]: I1203 09:06:50.032985 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt"] Dec 03 09:06:50 crc kubenswrapper[4576]: I1203 09:06:50.033362 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-rjr2j" Dec 03 09:06:50 crc kubenswrapper[4576]: I1203 09:06:50.155320 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e86979bd-954c-45c5-940d-f8e334277a44-bundle\") pod \"8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt\" (UID: \"e86979bd-954c-45c5-940d-f8e334277a44\") " pod="openstack-operators/8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt" Dec 03 09:06:50 crc kubenswrapper[4576]: I1203 09:06:50.155416 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jnnz7\" (UniqueName: \"kubernetes.io/projected/e86979bd-954c-45c5-940d-f8e334277a44-kube-api-access-jnnz7\") pod \"8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt\" (UID: \"e86979bd-954c-45c5-940d-f8e334277a44\") " pod="openstack-operators/8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt" Dec 03 09:06:50 crc kubenswrapper[4576]: I1203 09:06:50.155573 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e86979bd-954c-45c5-940d-f8e334277a44-util\") pod \"8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt\" (UID: \"e86979bd-954c-45c5-940d-f8e334277a44\") " pod="openstack-operators/8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt" Dec 03 09:06:50 crc kubenswrapper[4576]: I1203 09:06:50.256650 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e86979bd-954c-45c5-940d-f8e334277a44-bundle\") pod \"8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt\" (UID: \"e86979bd-954c-45c5-940d-f8e334277a44\") " pod="openstack-operators/8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt" Dec 03 09:06:50 crc kubenswrapper[4576]: I1203 09:06:50.256716 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jnnz7\" (UniqueName: 
\"kubernetes.io/projected/e86979bd-954c-45c5-940d-f8e334277a44-kube-api-access-jnnz7\") pod \"8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt\" (UID: \"e86979bd-954c-45c5-940d-f8e334277a44\") " pod="openstack-operators/8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt" Dec 03 09:06:50 crc kubenswrapper[4576]: I1203 09:06:50.256772 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e86979bd-954c-45c5-940d-f8e334277a44-util\") pod \"8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt\" (UID: \"e86979bd-954c-45c5-940d-f8e334277a44\") " pod="openstack-operators/8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt" Dec 03 09:06:50 crc kubenswrapper[4576]: I1203 09:06:50.257408 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e86979bd-954c-45c5-940d-f8e334277a44-util\") pod \"8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt\" (UID: \"e86979bd-954c-45c5-940d-f8e334277a44\") " pod="openstack-operators/8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt" Dec 03 09:06:50 crc kubenswrapper[4576]: I1203 09:06:50.258075 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e86979bd-954c-45c5-940d-f8e334277a44-bundle\") pod \"8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt\" (UID: \"e86979bd-954c-45c5-940d-f8e334277a44\") " pod="openstack-operators/8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt" Dec 03 09:06:50 crc kubenswrapper[4576]: I1203 09:06:50.285678 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jnnz7\" (UniqueName: \"kubernetes.io/projected/e86979bd-954c-45c5-940d-f8e334277a44-kube-api-access-jnnz7\") pod \"8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt\" (UID: \"e86979bd-954c-45c5-940d-f8e334277a44\") " pod="openstack-operators/8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt" Dec 03 09:06:50 crc kubenswrapper[4576]: I1203 09:06:50.350685 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt" Dec 03 09:06:50 crc kubenswrapper[4576]: I1203 09:06:50.787301 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt"] Dec 03 09:06:51 crc kubenswrapper[4576]: I1203 09:06:51.375833 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt" event={"ID":"e86979bd-954c-45c5-940d-f8e334277a44","Type":"ContainerStarted","Data":"6043be29bb0fe255577a12fa233cdf8e983c48c081079e8d28ce234baa5ba4e1"} Dec 03 09:06:52 crc kubenswrapper[4576]: I1203 09:06:52.384565 4576 generic.go:334] "Generic (PLEG): container finished" podID="e86979bd-954c-45c5-940d-f8e334277a44" containerID="fa282ebf971e1fa2c3f333c07786a89aec87fddfb662b231713359ae86f8edfc" exitCode=0 Dec 03 09:06:52 crc kubenswrapper[4576]: I1203 09:06:52.384664 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt" event={"ID":"e86979bd-954c-45c5-940d-f8e334277a44","Type":"ContainerDied","Data":"fa282ebf971e1fa2c3f333c07786a89aec87fddfb662b231713359ae86f8edfc"} Dec 03 09:06:52 crc kubenswrapper[4576]: I1203 09:06:52.676777 4576 scope.go:117] "RemoveContainer" containerID="971057ff9d8dd948ea2baec3fb0fe13004ef1bdcb132acd7f101f25e8ebc2c91" Dec 03 09:06:52 crc kubenswrapper[4576]: E1203 09:06:52.677323 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:06:53 crc kubenswrapper[4576]: I1203 09:06:53.394961 4576 generic.go:334] "Generic (PLEG): container finished" podID="e86979bd-954c-45c5-940d-f8e334277a44" containerID="3eace87e2926feaa256609214b86ef24a4d21b9949beeb8ca553c59fbbc8c87a" exitCode=0 Dec 03 09:06:53 crc kubenswrapper[4576]: I1203 09:06:53.395101 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt" event={"ID":"e86979bd-954c-45c5-940d-f8e334277a44","Type":"ContainerDied","Data":"3eace87e2926feaa256609214b86ef24a4d21b9949beeb8ca553c59fbbc8c87a"} Dec 03 09:06:54 crc kubenswrapper[4576]: I1203 09:06:54.405460 4576 generic.go:334] "Generic (PLEG): container finished" podID="e86979bd-954c-45c5-940d-f8e334277a44" containerID="0e542eb46537ac678cd02e37b6c77673cb49c035bbb3b584625649a0c27bbf48" exitCode=0 Dec 03 09:06:54 crc kubenswrapper[4576]: I1203 09:06:54.405595 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt" event={"ID":"e86979bd-954c-45c5-940d-f8e334277a44","Type":"ContainerDied","Data":"0e542eb46537ac678cd02e37b6c77673cb49c035bbb3b584625649a0c27bbf48"} Dec 03 09:06:55 crc kubenswrapper[4576]: I1203 09:06:55.643306 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt" Dec 03 09:06:55 crc kubenswrapper[4576]: I1203 09:06:55.644744 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e86979bd-954c-45c5-940d-f8e334277a44-util\") pod \"e86979bd-954c-45c5-940d-f8e334277a44\" (UID: \"e86979bd-954c-45c5-940d-f8e334277a44\") " Dec 03 09:06:55 crc kubenswrapper[4576]: I1203 09:06:55.644785 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jnnz7\" (UniqueName: \"kubernetes.io/projected/e86979bd-954c-45c5-940d-f8e334277a44-kube-api-access-jnnz7\") pod \"e86979bd-954c-45c5-940d-f8e334277a44\" (UID: \"e86979bd-954c-45c5-940d-f8e334277a44\") " Dec 03 09:06:55 crc kubenswrapper[4576]: I1203 09:06:55.644844 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e86979bd-954c-45c5-940d-f8e334277a44-bundle\") pod \"e86979bd-954c-45c5-940d-f8e334277a44\" (UID: \"e86979bd-954c-45c5-940d-f8e334277a44\") " Dec 03 09:06:55 crc kubenswrapper[4576]: I1203 09:06:55.647562 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e86979bd-954c-45c5-940d-f8e334277a44-bundle" (OuterVolumeSpecName: "bundle") pod "e86979bd-954c-45c5-940d-f8e334277a44" (UID: "e86979bd-954c-45c5-940d-f8e334277a44"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:06:55 crc kubenswrapper[4576]: I1203 09:06:55.651200 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e86979bd-954c-45c5-940d-f8e334277a44-kube-api-access-jnnz7" (OuterVolumeSpecName: "kube-api-access-jnnz7") pod "e86979bd-954c-45c5-940d-f8e334277a44" (UID: "e86979bd-954c-45c5-940d-f8e334277a44"). InnerVolumeSpecName "kube-api-access-jnnz7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:06:55 crc kubenswrapper[4576]: I1203 09:06:55.667350 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e86979bd-954c-45c5-940d-f8e334277a44-util" (OuterVolumeSpecName: "util") pod "e86979bd-954c-45c5-940d-f8e334277a44" (UID: "e86979bd-954c-45c5-940d-f8e334277a44"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:06:55 crc kubenswrapper[4576]: I1203 09:06:55.747485 4576 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e86979bd-954c-45c5-940d-f8e334277a44-util\") on node \"crc\" DevicePath \"\"" Dec 03 09:06:55 crc kubenswrapper[4576]: I1203 09:06:55.747517 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jnnz7\" (UniqueName: \"kubernetes.io/projected/e86979bd-954c-45c5-940d-f8e334277a44-kube-api-access-jnnz7\") on node \"crc\" DevicePath \"\"" Dec 03 09:06:55 crc kubenswrapper[4576]: I1203 09:06:55.747546 4576 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e86979bd-954c-45c5-940d-f8e334277a44-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:06:56 crc kubenswrapper[4576]: I1203 09:06:56.426487 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt" event={"ID":"e86979bd-954c-45c5-940d-f8e334277a44","Type":"ContainerDied","Data":"6043be29bb0fe255577a12fa233cdf8e983c48c081079e8d28ce234baa5ba4e1"} Dec 03 09:06:56 crc kubenswrapper[4576]: I1203 09:06:56.426602 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt" Dec 03 09:06:56 crc kubenswrapper[4576]: I1203 09:06:56.426589 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6043be29bb0fe255577a12fa233cdf8e983c48c081079e8d28ce234baa5ba4e1" Dec 03 09:07:02 crc kubenswrapper[4576]: I1203 09:07:02.142820 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6757ffd54f-4wfmb"] Dec 03 09:07:02 crc kubenswrapper[4576]: E1203 09:07:02.143451 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e86979bd-954c-45c5-940d-f8e334277a44" containerName="extract" Dec 03 09:07:02 crc kubenswrapper[4576]: I1203 09:07:02.143463 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="e86979bd-954c-45c5-940d-f8e334277a44" containerName="extract" Dec 03 09:07:02 crc kubenswrapper[4576]: E1203 09:07:02.143482 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e86979bd-954c-45c5-940d-f8e334277a44" containerName="pull" Dec 03 09:07:02 crc kubenswrapper[4576]: I1203 09:07:02.143487 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="e86979bd-954c-45c5-940d-f8e334277a44" containerName="pull" Dec 03 09:07:02 crc kubenswrapper[4576]: E1203 09:07:02.143498 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e86979bd-954c-45c5-940d-f8e334277a44" containerName="util" Dec 03 09:07:02 crc kubenswrapper[4576]: I1203 09:07:02.143503 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="e86979bd-954c-45c5-940d-f8e334277a44" containerName="util" Dec 03 09:07:02 crc kubenswrapper[4576]: I1203 09:07:02.143623 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="e86979bd-954c-45c5-940d-f8e334277a44" containerName="extract" Dec 03 09:07:02 crc kubenswrapper[4576]: I1203 09:07:02.144122 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-6757ffd54f-4wfmb" Dec 03 09:07:02 crc kubenswrapper[4576]: I1203 09:07:02.147008 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-6nn4t" Dec 03 09:07:02 crc kubenswrapper[4576]: I1203 09:07:02.162571 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jbgtx\" (UniqueName: \"kubernetes.io/projected/b30928c7-4b2d-4fec-81f3-9237336e8d81-kube-api-access-jbgtx\") pod \"openstack-operator-controller-operator-6757ffd54f-4wfmb\" (UID: \"b30928c7-4b2d-4fec-81f3-9237336e8d81\") " pod="openstack-operators/openstack-operator-controller-operator-6757ffd54f-4wfmb" Dec 03 09:07:02 crc kubenswrapper[4576]: I1203 09:07:02.168729 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6757ffd54f-4wfmb"] Dec 03 09:07:02 crc kubenswrapper[4576]: I1203 09:07:02.264102 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jbgtx\" (UniqueName: \"kubernetes.io/projected/b30928c7-4b2d-4fec-81f3-9237336e8d81-kube-api-access-jbgtx\") pod \"openstack-operator-controller-operator-6757ffd54f-4wfmb\" (UID: \"b30928c7-4b2d-4fec-81f3-9237336e8d81\") " pod="openstack-operators/openstack-operator-controller-operator-6757ffd54f-4wfmb" Dec 03 09:07:02 crc kubenswrapper[4576]: I1203 09:07:02.287020 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jbgtx\" (UniqueName: \"kubernetes.io/projected/b30928c7-4b2d-4fec-81f3-9237336e8d81-kube-api-access-jbgtx\") pod \"openstack-operator-controller-operator-6757ffd54f-4wfmb\" (UID: \"b30928c7-4b2d-4fec-81f3-9237336e8d81\") " pod="openstack-operators/openstack-operator-controller-operator-6757ffd54f-4wfmb" Dec 03 09:07:02 crc kubenswrapper[4576]: I1203 09:07:02.461357 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-6757ffd54f-4wfmb" Dec 03 09:07:02 crc kubenswrapper[4576]: I1203 09:07:02.743587 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6757ffd54f-4wfmb"] Dec 03 09:07:03 crc kubenswrapper[4576]: I1203 09:07:03.475219 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-6757ffd54f-4wfmb" event={"ID":"b30928c7-4b2d-4fec-81f3-9237336e8d81","Type":"ContainerStarted","Data":"8bd10b67b180254a50511e0d3e4f6246aa6028196be75c6ce261dd23c48ee70e"} Dec 03 09:07:07 crc kubenswrapper[4576]: I1203 09:07:07.677466 4576 scope.go:117] "RemoveContainer" containerID="971057ff9d8dd948ea2baec3fb0fe13004ef1bdcb132acd7f101f25e8ebc2c91" Dec 03 09:07:07 crc kubenswrapper[4576]: E1203 09:07:07.677980 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:07:10 crc kubenswrapper[4576]: I1203 09:07:10.625383 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-6757ffd54f-4wfmb" event={"ID":"b30928c7-4b2d-4fec-81f3-9237336e8d81","Type":"ContainerStarted","Data":"81e420c2908da356b977bacaefce2c90f8c843db3d71d1788b66525d19fb7746"} Dec 03 09:07:10 crc kubenswrapper[4576]: I1203 09:07:10.626175 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-6757ffd54f-4wfmb" Dec 03 09:07:10 crc kubenswrapper[4576]: I1203 09:07:10.658948 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-6757ffd54f-4wfmb" podStartSLOduration=1.079055635 podStartE2EDuration="8.658905642s" podCreationTimestamp="2025-12-03 09:07:02 +0000 UTC" firstStartedPulling="2025-12-03 09:07:02.759623252 +0000 UTC m=+1630.145600236" lastFinishedPulling="2025-12-03 09:07:10.339473239 +0000 UTC m=+1637.725450243" observedRunningTime="2025-12-03 09:07:10.655420908 +0000 UTC m=+1638.041397912" watchObservedRunningTime="2025-12-03 09:07:10.658905642 +0000 UTC m=+1638.044882646" Dec 03 09:07:18 crc kubenswrapper[4576]: I1203 09:07:18.677194 4576 scope.go:117] "RemoveContainer" containerID="971057ff9d8dd948ea2baec3fb0fe13004ef1bdcb132acd7f101f25e8ebc2c91" Dec 03 09:07:18 crc kubenswrapper[4576]: E1203 09:07:18.679082 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:07:22 crc kubenswrapper[4576]: I1203 09:07:22.478164 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-6757ffd54f-4wfmb" Dec 03 09:07:31 crc kubenswrapper[4576]: I1203 09:07:31.677779 4576 scope.go:117] "RemoveContainer" 
containerID="971057ff9d8dd948ea2baec3fb0fe13004ef1bdcb132acd7f101f25e8ebc2c91" Dec 03 09:07:31 crc kubenswrapper[4576]: E1203 09:07:31.678551 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:07:44 crc kubenswrapper[4576]: I1203 09:07:44.677338 4576 scope.go:117] "RemoveContainer" containerID="971057ff9d8dd948ea2baec3fb0fe13004ef1bdcb132acd7f101f25e8ebc2c91" Dec 03 09:07:44 crc kubenswrapper[4576]: E1203 09:07:44.678072 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.069991 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-4mznn"] Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.071762 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-4mznn" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.094413 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-wqx97"] Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.095945 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-wqx97" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.098958 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-w89hv" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.107985 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-rr2s2" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.108577 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-4mznn"] Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.122158 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-n78hw"] Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.123576 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-n78hw" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.128571 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-6zbcl" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.133632 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-n78hw"] Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.158829 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-wqx97"] Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.182500 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-zghdb"] Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.183605 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-zghdb" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.190136 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-nmwxn" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.190537 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-zghdb"] Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.193340 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-td6h7\" (UniqueName: \"kubernetes.io/projected/75840987-c6e3-45e6-912c-85771c498e41-kube-api-access-td6h7\") pod \"designate-operator-controller-manager-78b4bc895b-n78hw\" (UID: \"75840987-c6e3-45e6-912c-85771c498e41\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-n78hw" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.193413 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vt8qp\" (UniqueName: \"kubernetes.io/projected/cce29053-f3ed-4dce-a362-c99b4aa31102-kube-api-access-vt8qp\") pod \"cinder-operator-controller-manager-859b6ccc6-wqx97\" (UID: \"cce29053-f3ed-4dce-a362-c99b4aa31102\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-wqx97" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.193462 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqcfc\" (UniqueName: \"kubernetes.io/projected/3ea8e44c-f5b3-4b92-94ed-04954472481c-kube-api-access-kqcfc\") pod \"barbican-operator-controller-manager-7d9dfd778-4mznn\" (UID: \"3ea8e44c-f5b3-4b92-94ed-04954472481c\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-4mznn" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.193509 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n592d\" (UniqueName: \"kubernetes.io/projected/df68290e-5853-4fce-903f-354ea9f740e1-kube-api-access-n592d\") pod \"glance-operator-controller-manager-77987cd8cd-zghdb\" (UID: \"df68290e-5853-4fce-903f-354ea9f740e1\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-zghdb" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.221228 4576 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-nsggq"] Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.226503 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-nsggq" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.241190 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-5tgjg" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.246115 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-nsggq"] Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.259237 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-8g876"] Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.260331 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-8g876" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.267915 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-46wm2" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.283037 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-8g876"] Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.294170 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-td6h7\" (UniqueName: \"kubernetes.io/projected/75840987-c6e3-45e6-912c-85771c498e41-kube-api-access-td6h7\") pod \"designate-operator-controller-manager-78b4bc895b-n78hw\" (UID: \"75840987-c6e3-45e6-912c-85771c498e41\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-n78hw" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.294218 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vt8qp\" (UniqueName: \"kubernetes.io/projected/cce29053-f3ed-4dce-a362-c99b4aa31102-kube-api-access-vt8qp\") pod \"cinder-operator-controller-manager-859b6ccc6-wqx97\" (UID: \"cce29053-f3ed-4dce-a362-c99b4aa31102\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-wqx97" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.294258 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-clm7n\" (UniqueName: \"kubernetes.io/projected/649283f6-ebcd-45a0-974f-e9c14138fa46-kube-api-access-clm7n\") pod \"heat-operator-controller-manager-5f64f6f8bb-nsggq\" (UID: \"649283f6-ebcd-45a0-974f-e9c14138fa46\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-nsggq" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.294302 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqcfc\" (UniqueName: \"kubernetes.io/projected/3ea8e44c-f5b3-4b92-94ed-04954472481c-kube-api-access-kqcfc\") pod \"barbican-operator-controller-manager-7d9dfd778-4mznn\" (UID: \"3ea8e44c-f5b3-4b92-94ed-04954472481c\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-4mznn" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.294338 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n592d\" (UniqueName: 
\"kubernetes.io/projected/df68290e-5853-4fce-903f-354ea9f740e1-kube-api-access-n592d\") pod \"glance-operator-controller-manager-77987cd8cd-zghdb\" (UID: \"df68290e-5853-4fce-903f-354ea9f740e1\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-zghdb" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.294368 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hs7l4\" (UniqueName: \"kubernetes.io/projected/41505f0c-de81-41e5-b9e1-de8a17563b8d-kube-api-access-hs7l4\") pod \"horizon-operator-controller-manager-68c6d99b8f-8g876\" (UID: \"41505f0c-de81-41e5-b9e1-de8a17563b8d\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-8g876" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.318564 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-4tr5z"] Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.319634 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-4tr5z" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.327425 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-75wpn" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.327648 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.332327 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-49hnv"] Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.333353 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-49hnv" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.350420 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-td6h7\" (UniqueName: \"kubernetes.io/projected/75840987-c6e3-45e6-912c-85771c498e41-kube-api-access-td6h7\") pod \"designate-operator-controller-manager-78b4bc895b-n78hw\" (UID: \"75840987-c6e3-45e6-912c-85771c498e41\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-n78hw" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.355512 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-n2hnq" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.360494 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kqcfc\" (UniqueName: \"kubernetes.io/projected/3ea8e44c-f5b3-4b92-94ed-04954472481c-kube-api-access-kqcfc\") pod \"barbican-operator-controller-manager-7d9dfd778-4mznn\" (UID: \"3ea8e44c-f5b3-4b92-94ed-04954472481c\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-4mznn" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.364759 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-49hnv"] Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.365252 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n592d\" (UniqueName: \"kubernetes.io/projected/df68290e-5853-4fce-903f-354ea9f740e1-kube-api-access-n592d\") pod \"glance-operator-controller-manager-77987cd8cd-zghdb\" (UID: \"df68290e-5853-4fce-903f-354ea9f740e1\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-zghdb" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.393086 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-4tr5z"] Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.398261 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-clm7n\" (UniqueName: \"kubernetes.io/projected/649283f6-ebcd-45a0-974f-e9c14138fa46-kube-api-access-clm7n\") pod \"heat-operator-controller-manager-5f64f6f8bb-nsggq\" (UID: \"649283f6-ebcd-45a0-974f-e9c14138fa46\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-nsggq" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.398420 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hs7l4\" (UniqueName: \"kubernetes.io/projected/41505f0c-de81-41e5-b9e1-de8a17563b8d-kube-api-access-hs7l4\") pod \"horizon-operator-controller-manager-68c6d99b8f-8g876\" (UID: \"41505f0c-de81-41e5-b9e1-de8a17563b8d\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-8g876" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.405222 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-4mznn" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.457852 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vt8qp\" (UniqueName: \"kubernetes.io/projected/cce29053-f3ed-4dce-a362-c99b4aa31102-kube-api-access-vt8qp\") pod \"cinder-operator-controller-manager-859b6ccc6-wqx97\" (UID: \"cce29053-f3ed-4dce-a362-c99b4aa31102\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-wqx97" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.473144 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-n78hw" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.475626 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-clm7n\" (UniqueName: \"kubernetes.io/projected/649283f6-ebcd-45a0-974f-e9c14138fa46-kube-api-access-clm7n\") pod \"heat-operator-controller-manager-5f64f6f8bb-nsggq\" (UID: \"649283f6-ebcd-45a0-974f-e9c14138fa46\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-nsggq" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.490682 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-64r8s"] Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.528477 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hs7l4\" (UniqueName: \"kubernetes.io/projected/41505f0c-de81-41e5-b9e1-de8a17563b8d-kube-api-access-hs7l4\") pod \"horizon-operator-controller-manager-68c6d99b8f-8g876\" (UID: \"41505f0c-de81-41e5-b9e1-de8a17563b8d\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-8g876" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.528838 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-64r8s" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.529456 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-49w9n\" (UniqueName: \"kubernetes.io/projected/ac2346b5-8522-40bf-8083-15d06d8b9afd-kube-api-access-49w9n\") pod \"ironic-operator-controller-manager-6c548fd776-49hnv\" (UID: \"ac2346b5-8522-40bf-8083-15d06d8b9afd\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-49hnv" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.529545 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f923e423-dcca-499c-8bf1-1c5d4288f20c-cert\") pod \"infra-operator-controller-manager-57548d458d-4tr5z\" (UID: \"f923e423-dcca-499c-8bf1-1c5d4288f20c\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-4tr5z" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.529587 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7x4q\" (UniqueName: \"kubernetes.io/projected/f923e423-dcca-499c-8bf1-1c5d4288f20c-kube-api-access-q7x4q\") pod \"infra-operator-controller-manager-57548d458d-4tr5z\" (UID: \"f923e423-dcca-499c-8bf1-1c5d4288f20c\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-4tr5z" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.530030 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-zghdb" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.537130 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-qxct7" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.560562 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-w68kf"] Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.561847 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-w68kf" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.562991 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-nsggq" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.576632 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-42rgl"] Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.577838 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-42rgl" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.581496 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-24fz6" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.584627 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-8l85n" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.585154 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-8g876" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.590058 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-64r8s"] Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.599896 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-42rgl"] Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.613881 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-w68kf"] Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.632916 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f923e423-dcca-499c-8bf1-1c5d4288f20c-cert\") pod \"infra-operator-controller-manager-57548d458d-4tr5z\" (UID: \"f923e423-dcca-499c-8bf1-1c5d4288f20c\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-4tr5z" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.632976 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7x4q\" (UniqueName: \"kubernetes.io/projected/f923e423-dcca-499c-8bf1-1c5d4288f20c-kube-api-access-q7x4q\") pod \"infra-operator-controller-manager-57548d458d-4tr5z\" (UID: \"f923e423-dcca-499c-8bf1-1c5d4288f20c\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-4tr5z" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.633021 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-49w9n\" (UniqueName: \"kubernetes.io/projected/ac2346b5-8522-40bf-8083-15d06d8b9afd-kube-api-access-49w9n\") pod \"ironic-operator-controller-manager-6c548fd776-49hnv\" (UID: \"ac2346b5-8522-40bf-8083-15d06d8b9afd\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-49hnv" Dec 03 09:07:50 crc kubenswrapper[4576]: E1203 09:07:50.633405 4576 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.633408 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fmj4x\" (UniqueName: \"kubernetes.io/projected/13d78877-8170-498d-bf0c-ab37fb799c83-kube-api-access-fmj4x\") pod \"keystone-operator-controller-manager-7765d96ddf-64r8s\" (UID: \"13d78877-8170-498d-bf0c-ab37fb799c83\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-64r8s" Dec 03 09:07:50 crc kubenswrapper[4576]: E1203 09:07:50.633494 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f923e423-dcca-499c-8bf1-1c5d4288f20c-cert podName:f923e423-dcca-499c-8bf1-1c5d4288f20c nodeName:}" failed. No retries permitted until 2025-12-03 09:07:51.133455882 +0000 UTC m=+1678.519432866 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/f923e423-dcca-499c-8bf1-1c5d4288f20c-cert") pod "infra-operator-controller-manager-57548d458d-4tr5z" (UID: "f923e423-dcca-499c-8bf1-1c5d4288f20c") : secret "infra-operator-webhook-server-cert" not found Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.641228 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-jwqvx"] Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.642361 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-jwqvx" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.644799 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-vnxxc" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.669911 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-7mg4g"] Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.687089 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-rflv4"] Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.692057 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-7mg4g" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.695273 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-kq7fg" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.698698 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-49w9n\" (UniqueName: \"kubernetes.io/projected/ac2346b5-8522-40bf-8083-15d06d8b9afd-kube-api-access-49w9n\") pod \"ironic-operator-controller-manager-6c548fd776-49hnv\" (UID: \"ac2346b5-8522-40bf-8083-15d06d8b9afd\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-49hnv" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.699483 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-rflv4" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.705551 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-c5bv7" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.716922 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-jwqvx"] Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.719620 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-rflv4"] Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.727166 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-wqx97" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.735136 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zqlfs\" (UniqueName: \"kubernetes.io/projected/a3246ab9-76a7-41dd-9fcd-57323766f4f2-kube-api-access-zqlfs\") pod \"mariadb-operator-controller-manager-56bbcc9d85-42rgl\" (UID: \"a3246ab9-76a7-41dd-9fcd-57323766f4f2\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-42rgl" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.744915 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-7mg4g"] Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.745224 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fmj4x\" (UniqueName: \"kubernetes.io/projected/13d78877-8170-498d-bf0c-ab37fb799c83-kube-api-access-fmj4x\") pod \"keystone-operator-controller-manager-7765d96ddf-64r8s\" (UID: \"13d78877-8170-498d-bf0c-ab37fb799c83\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-64r8s" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.747810 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9pqhm\" (UniqueName: \"kubernetes.io/projected/7c74d79c-0100-40b9-a363-434b817b0504-kube-api-access-9pqhm\") pod \"manila-operator-controller-manager-7c79b5df47-w68kf\" (UID: \"7c74d79c-0100-40b9-a363-434b817b0504\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-w68kf" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.765837 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-49hnv" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.788487 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7x4q\" (UniqueName: \"kubernetes.io/projected/f923e423-dcca-499c-8bf1-1c5d4288f20c-kube-api-access-q7x4q\") pod \"infra-operator-controller-manager-57548d458d-4tr5z\" (UID: \"f923e423-dcca-499c-8bf1-1c5d4288f20c\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-4tr5z" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.805269 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd42tjbm"] Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.806365 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd42tjbm" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.815818 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.816031 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-msg45" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.827040 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-g9t26"] Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.829885 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-g9t26" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.833737 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-9tc9n" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.845409 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fmj4x\" (UniqueName: \"kubernetes.io/projected/13d78877-8170-498d-bf0c-ab37fb799c83-kube-api-access-fmj4x\") pod \"keystone-operator-controller-manager-7765d96ddf-64r8s\" (UID: \"13d78877-8170-498d-bf0c-ab37fb799c83\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-64r8s" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.855436 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zqlfs\" (UniqueName: \"kubernetes.io/projected/a3246ab9-76a7-41dd-9fcd-57323766f4f2-kube-api-access-zqlfs\") pod \"mariadb-operator-controller-manager-56bbcc9d85-42rgl\" (UID: \"a3246ab9-76a7-41dd-9fcd-57323766f4f2\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-42rgl" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.855773 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rm99g\" (UniqueName: \"kubernetes.io/projected/5280c7ee-cf95-4f36-a074-247880784343-kube-api-access-rm99g\") pod \"octavia-operator-controller-manager-998648c74-rflv4\" (UID: \"5280c7ee-cf95-4f36-a074-247880784343\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-rflv4" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.856613 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9pqhm\" (UniqueName: \"kubernetes.io/projected/7c74d79c-0100-40b9-a363-434b817b0504-kube-api-access-9pqhm\") pod \"manila-operator-controller-manager-7c79b5df47-w68kf\" (UID: \"7c74d79c-0100-40b9-a363-434b817b0504\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-w68kf" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.856788 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8d8t\" (UniqueName: \"kubernetes.io/projected/7e7e8ece-f6be-41dc-be20-b82b844b4b83-kube-api-access-z8d8t\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-jwqvx\" (UID: \"7e7e8ece-f6be-41dc-be20-b82b844b4b83\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-jwqvx" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.856915 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bad742c6-6ff9-4fe9-8a09-7d399b6d41de-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd42tjbm\" (UID: \"bad742c6-6ff9-4fe9-8a09-7d399b6d41de\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd42tjbm" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.857064 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-djhn7\" (UniqueName: \"kubernetes.io/projected/e697b8bb-b78f-4b0c-92e6-adde533c75b6-kube-api-access-djhn7\") pod \"nova-operator-controller-manager-697bc559fc-7mg4g\" (UID: \"e697b8bb-b78f-4b0c-92e6-adde533c75b6\") " 
pod="openstack-operators/nova-operator-controller-manager-697bc559fc-7mg4g" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.857194 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k4qz8\" (UniqueName: \"kubernetes.io/projected/c4445b62-9884-4667-96cd-ce531cc798c4-kube-api-access-k4qz8\") pod \"ovn-operator-controller-manager-b6456fdb6-g9t26\" (UID: \"c4445b62-9884-4667-96cd-ce531cc798c4\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-g9t26" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.858487 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6gsbr\" (UniqueName: \"kubernetes.io/projected/bad742c6-6ff9-4fe9-8a09-7d399b6d41de-kube-api-access-6gsbr\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd42tjbm\" (UID: \"bad742c6-6ff9-4fe9-8a09-7d399b6d41de\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd42tjbm" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.869668 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-tdbjg"] Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.903979 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-64r8s" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.904924 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-tdbjg" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.924584 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-dzm65" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.925779 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd42tjbm"] Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.945339 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9pqhm\" (UniqueName: \"kubernetes.io/projected/7c74d79c-0100-40b9-a363-434b817b0504-kube-api-access-9pqhm\") pod \"manila-operator-controller-manager-7c79b5df47-w68kf\" (UID: \"7c74d79c-0100-40b9-a363-434b817b0504\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-w68kf" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.972960 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zqlfs\" (UniqueName: \"kubernetes.io/projected/a3246ab9-76a7-41dd-9fcd-57323766f4f2-kube-api-access-zqlfs\") pod \"mariadb-operator-controller-manager-56bbcc9d85-42rgl\" (UID: \"a3246ab9-76a7-41dd-9fcd-57323766f4f2\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-42rgl" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.985781 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-6mlkp"] Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.987553 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-6mlkp" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.989807 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-5pdz8" Dec 03 09:07:50 crc kubenswrapper[4576]: I1203 09:07:50.995622 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-tdbjg"] Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.005608 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zkg7j\" (UniqueName: \"kubernetes.io/projected/8c321c82-4420-4b97-a16b-ce20c7ebcb15-kube-api-access-zkg7j\") pod \"placement-operator-controller-manager-78f8948974-tdbjg\" (UID: \"8c321c82-4420-4b97-a16b-ce20c7ebcb15\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-tdbjg" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.005685 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6gsbr\" (UniqueName: \"kubernetes.io/projected/bad742c6-6ff9-4fe9-8a09-7d399b6d41de-kube-api-access-6gsbr\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd42tjbm\" (UID: \"bad742c6-6ff9-4fe9-8a09-7d399b6d41de\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd42tjbm" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.005754 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rm99g\" (UniqueName: \"kubernetes.io/projected/5280c7ee-cf95-4f36-a074-247880784343-kube-api-access-rm99g\") pod \"octavia-operator-controller-manager-998648c74-rflv4\" (UID: \"5280c7ee-cf95-4f36-a074-247880784343\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-rflv4" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.005824 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8d8t\" (UniqueName: \"kubernetes.io/projected/7e7e8ece-f6be-41dc-be20-b82b844b4b83-kube-api-access-z8d8t\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-jwqvx\" (UID: \"7e7e8ece-f6be-41dc-be20-b82b844b4b83\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-jwqvx" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.006079 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bad742c6-6ff9-4fe9-8a09-7d399b6d41de-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd42tjbm\" (UID: \"bad742c6-6ff9-4fe9-8a09-7d399b6d41de\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd42tjbm" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.006150 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-djhn7\" (UniqueName: \"kubernetes.io/projected/e697b8bb-b78f-4b0c-92e6-adde533c75b6-kube-api-access-djhn7\") pod \"nova-operator-controller-manager-697bc559fc-7mg4g\" (UID: \"e697b8bb-b78f-4b0c-92e6-adde533c75b6\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-7mg4g" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.006189 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k4qz8\" (UniqueName: 
\"kubernetes.io/projected/c4445b62-9884-4667-96cd-ce531cc798c4-kube-api-access-k4qz8\") pod \"ovn-operator-controller-manager-b6456fdb6-g9t26\" (UID: \"c4445b62-9884-4667-96cd-ce531cc798c4\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-g9t26" Dec 03 09:07:51 crc kubenswrapper[4576]: E1203 09:07:51.006812 4576 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 09:07:51 crc kubenswrapper[4576]: E1203 09:07:51.006892 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bad742c6-6ff9-4fe9-8a09-7d399b6d41de-cert podName:bad742c6-6ff9-4fe9-8a09-7d399b6d41de nodeName:}" failed. No retries permitted until 2025-12-03 09:07:51.506869575 +0000 UTC m=+1678.892846559 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/bad742c6-6ff9-4fe9-8a09-7d399b6d41de-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd42tjbm" (UID: "bad742c6-6ff9-4fe9-8a09-7d399b6d41de") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.062907 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8d8t\" (UniqueName: \"kubernetes.io/projected/7e7e8ece-f6be-41dc-be20-b82b844b4b83-kube-api-access-z8d8t\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-jwqvx\" (UID: \"7e7e8ece-f6be-41dc-be20-b82b844b4b83\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-jwqvx" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.072079 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-djhn7\" (UniqueName: \"kubernetes.io/projected/e697b8bb-b78f-4b0c-92e6-adde533c75b6-kube-api-access-djhn7\") pod \"nova-operator-controller-manager-697bc559fc-7mg4g\" (UID: \"e697b8bb-b78f-4b0c-92e6-adde533c75b6\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-7mg4g" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.080162 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-6mlkp"] Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.080224 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k4qz8\" (UniqueName: \"kubernetes.io/projected/c4445b62-9884-4667-96cd-ce531cc798c4-kube-api-access-k4qz8\") pod \"ovn-operator-controller-manager-b6456fdb6-g9t26\" (UID: \"c4445b62-9884-4667-96cd-ce531cc798c4\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-g9t26" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.082132 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rm99g\" (UniqueName: \"kubernetes.io/projected/5280c7ee-cf95-4f36-a074-247880784343-kube-api-access-rm99g\") pod \"octavia-operator-controller-manager-998648c74-rflv4\" (UID: \"5280c7ee-cf95-4f36-a074-247880784343\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-rflv4" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.094260 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-rflv4" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.094815 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6gsbr\" (UniqueName: \"kubernetes.io/projected/bad742c6-6ff9-4fe9-8a09-7d399b6d41de-kube-api-access-6gsbr\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd42tjbm\" (UID: \"bad742c6-6ff9-4fe9-8a09-7d399b6d41de\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd42tjbm" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.115337 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zkg7j\" (UniqueName: \"kubernetes.io/projected/8c321c82-4420-4b97-a16b-ce20c7ebcb15-kube-api-access-zkg7j\") pod \"placement-operator-controller-manager-78f8948974-tdbjg\" (UID: \"8c321c82-4420-4b97-a16b-ce20c7ebcb15\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-tdbjg" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.115393 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jrmnv\" (UniqueName: \"kubernetes.io/projected/f6e0d66d-6dc0-461c-a5c5-8a1060b6b164-kube-api-access-jrmnv\") pod \"swift-operator-controller-manager-5f8c65bbfc-6mlkp\" (UID: \"f6e0d66d-6dc0-461c-a5c5-8a1060b6b164\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-6mlkp" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.118196 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-g9t26"] Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.149245 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-vb4nv"] Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.150284 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-vb4nv" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.161280 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-jdd9t" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.166949 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-ndz7z"] Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.168010 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-ndz7z" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.193225 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zkg7j\" (UniqueName: \"kubernetes.io/projected/8c321c82-4420-4b97-a16b-ce20c7ebcb15-kube-api-access-zkg7j\") pod \"placement-operator-controller-manager-78f8948974-tdbjg\" (UID: \"8c321c82-4420-4b97-a16b-ce20c7ebcb15\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-tdbjg" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.195171 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-ksdsk" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.217856 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jrmnv\" (UniqueName: \"kubernetes.io/projected/f6e0d66d-6dc0-461c-a5c5-8a1060b6b164-kube-api-access-jrmnv\") pod \"swift-operator-controller-manager-5f8c65bbfc-6mlkp\" (UID: \"f6e0d66d-6dc0-461c-a5c5-8a1060b6b164\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-6mlkp" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.218004 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f923e423-dcca-499c-8bf1-1c5d4288f20c-cert\") pod \"infra-operator-controller-manager-57548d458d-4tr5z\" (UID: \"f923e423-dcca-499c-8bf1-1c5d4288f20c\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-4tr5z" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.218051 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmpvq\" (UniqueName: \"kubernetes.io/projected/7d654424-85f1-4848-93f3-abb64297ce3b-kube-api-access-mmpvq\") pod \"telemetry-operator-controller-manager-76cc84c6bb-vb4nv\" (UID: \"7d654424-85f1-4848-93f3-abb64297ce3b\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-vb4nv" Dec 03 09:07:51 crc kubenswrapper[4576]: E1203 09:07:51.219645 4576 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 03 09:07:51 crc kubenswrapper[4576]: E1203 09:07:51.219736 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f923e423-dcca-499c-8bf1-1c5d4288f20c-cert podName:f923e423-dcca-499c-8bf1-1c5d4288f20c nodeName:}" failed. No retries permitted until 2025-12-03 09:07:52.219711394 +0000 UTC m=+1679.605688378 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/f923e423-dcca-499c-8bf1-1c5d4288f20c-cert") pod "infra-operator-controller-manager-57548d458d-4tr5z" (UID: "f923e423-dcca-499c-8bf1-1c5d4288f20c") : secret "infra-operator-webhook-server-cert" not found Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.220091 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-vb4nv"] Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.239044 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-w68kf" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.253616 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-ndz7z"] Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.253931 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-42rgl" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.304632 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jrmnv\" (UniqueName: \"kubernetes.io/projected/f6e0d66d-6dc0-461c-a5c5-8a1060b6b164-kube-api-access-jrmnv\") pod \"swift-operator-controller-manager-5f8c65bbfc-6mlkp\" (UID: \"f6e0d66d-6dc0-461c-a5c5-8a1060b6b164\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-6mlkp" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.307134 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-g9t26" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.310737 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-jwqvx" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.319815 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h5m8j\" (UniqueName: \"kubernetes.io/projected/3ce77fe1-0135-4043-9ebd-b7722db624d9-kube-api-access-h5m8j\") pod \"test-operator-controller-manager-5854674fcc-ndz7z\" (UID: \"3ce77fe1-0135-4043-9ebd-b7722db624d9\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-ndz7z" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.319960 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmpvq\" (UniqueName: \"kubernetes.io/projected/7d654424-85f1-4848-93f3-abb64297ce3b-kube-api-access-mmpvq\") pod \"telemetry-operator-controller-manager-76cc84c6bb-vb4nv\" (UID: \"7d654424-85f1-4848-93f3-abb64297ce3b\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-vb4nv" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.320828 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-tdbjg" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.324989 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-4c8wv"] Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.344204 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-4c8wv" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.352893 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-g7wf9" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.376307 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-4c8wv"] Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.389631 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-7mg4g" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.422763 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h5m8j\" (UniqueName: \"kubernetes.io/projected/3ce77fe1-0135-4043-9ebd-b7722db624d9-kube-api-access-h5m8j\") pod \"test-operator-controller-manager-5854674fcc-ndz7z\" (UID: \"3ce77fe1-0135-4043-9ebd-b7722db624d9\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-ndz7z" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.423886 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-6mlkp" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.428299 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmpvq\" (UniqueName: \"kubernetes.io/projected/7d654424-85f1-4848-93f3-abb64297ce3b-kube-api-access-mmpvq\") pod \"telemetry-operator-controller-manager-76cc84c6bb-vb4nv\" (UID: \"7d654424-85f1-4848-93f3-abb64297ce3b\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-vb4nv" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.489857 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-vb4nv" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.511786 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h5m8j\" (UniqueName: \"kubernetes.io/projected/3ce77fe1-0135-4043-9ebd-b7722db624d9-kube-api-access-h5m8j\") pod \"test-operator-controller-manager-5854674fcc-ndz7z\" (UID: \"3ce77fe1-0135-4043-9ebd-b7722db624d9\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-ndz7z" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.532977 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-ndz7z" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.534118 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bad742c6-6ff9-4fe9-8a09-7d399b6d41de-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd42tjbm\" (UID: \"bad742c6-6ff9-4fe9-8a09-7d399b6d41de\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd42tjbm" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.534206 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-772cf\" (UniqueName: \"kubernetes.io/projected/8a7accfb-c3a2-4f70-906e-b2a3545eb88a-kube-api-access-772cf\") pod \"watcher-operator-controller-manager-769dc69bc-4c8wv\" (UID: \"8a7accfb-c3a2-4f70-906e-b2a3545eb88a\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-4c8wv" Dec 03 09:07:51 crc kubenswrapper[4576]: E1203 09:07:51.534395 4576 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 09:07:51 crc kubenswrapper[4576]: E1203 09:07:51.534480 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bad742c6-6ff9-4fe9-8a09-7d399b6d41de-cert podName:bad742c6-6ff9-4fe9-8a09-7d399b6d41de nodeName:}" failed. 
No retries permitted until 2025-12-03 09:07:52.534460651 +0000 UTC m=+1679.920437645 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/bad742c6-6ff9-4fe9-8a09-7d399b6d41de-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd42tjbm" (UID: "bad742c6-6ff9-4fe9-8a09-7d399b6d41de") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.565677 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-665947b8b5-hr855"] Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.566648 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-665947b8b5-hr855" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.574320 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-bkz27" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.574754 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.581399 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-665947b8b5-hr855"] Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.582132 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.635284 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7k7vv\" (UniqueName: \"kubernetes.io/projected/446e8b7b-0e54-4b76-b0c7-56ec1f779499-kube-api-access-7k7vv\") pod \"openstack-operator-controller-manager-665947b8b5-hr855\" (UID: \"446e8b7b-0e54-4b76-b0c7-56ec1f779499\") " pod="openstack-operators/openstack-operator-controller-manager-665947b8b5-hr855" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.635369 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/446e8b7b-0e54-4b76-b0c7-56ec1f779499-webhook-certs\") pod \"openstack-operator-controller-manager-665947b8b5-hr855\" (UID: \"446e8b7b-0e54-4b76-b0c7-56ec1f779499\") " pod="openstack-operators/openstack-operator-controller-manager-665947b8b5-hr855" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.635408 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-772cf\" (UniqueName: \"kubernetes.io/projected/8a7accfb-c3a2-4f70-906e-b2a3545eb88a-kube-api-access-772cf\") pod \"watcher-operator-controller-manager-769dc69bc-4c8wv\" (UID: \"8a7accfb-c3a2-4f70-906e-b2a3545eb88a\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-4c8wv" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.635428 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/446e8b7b-0e54-4b76-b0c7-56ec1f779499-metrics-certs\") pod \"openstack-operator-controller-manager-665947b8b5-hr855\" (UID: \"446e8b7b-0e54-4b76-b0c7-56ec1f779499\") " pod="openstack-operators/openstack-operator-controller-manager-665947b8b5-hr855" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.659861 4576 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-772cf\" (UniqueName: \"kubernetes.io/projected/8a7accfb-c3a2-4f70-906e-b2a3545eb88a-kube-api-access-772cf\") pod \"watcher-operator-controller-manager-769dc69bc-4c8wv\" (UID: \"8a7accfb-c3a2-4f70-906e-b2a3545eb88a\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-4c8wv" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.743541 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7k7vv\" (UniqueName: \"kubernetes.io/projected/446e8b7b-0e54-4b76-b0c7-56ec1f779499-kube-api-access-7k7vv\") pod \"openstack-operator-controller-manager-665947b8b5-hr855\" (UID: \"446e8b7b-0e54-4b76-b0c7-56ec1f779499\") " pod="openstack-operators/openstack-operator-controller-manager-665947b8b5-hr855" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.743703 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/446e8b7b-0e54-4b76-b0c7-56ec1f779499-webhook-certs\") pod \"openstack-operator-controller-manager-665947b8b5-hr855\" (UID: \"446e8b7b-0e54-4b76-b0c7-56ec1f779499\") " pod="openstack-operators/openstack-operator-controller-manager-665947b8b5-hr855" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.743746 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/446e8b7b-0e54-4b76-b0c7-56ec1f779499-metrics-certs\") pod \"openstack-operator-controller-manager-665947b8b5-hr855\" (UID: \"446e8b7b-0e54-4b76-b0c7-56ec1f779499\") " pod="openstack-operators/openstack-operator-controller-manager-665947b8b5-hr855" Dec 03 09:07:51 crc kubenswrapper[4576]: E1203 09:07:51.745805 4576 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 03 09:07:51 crc kubenswrapper[4576]: E1203 09:07:51.745864 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/446e8b7b-0e54-4b76-b0c7-56ec1f779499-webhook-certs podName:446e8b7b-0e54-4b76-b0c7-56ec1f779499 nodeName:}" failed. No retries permitted until 2025-12-03 09:07:52.245846441 +0000 UTC m=+1679.631823425 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/446e8b7b-0e54-4b76-b0c7-56ec1f779499-webhook-certs") pod "openstack-operator-controller-manager-665947b8b5-hr855" (UID: "446e8b7b-0e54-4b76-b0c7-56ec1f779499") : secret "webhook-server-cert" not found Dec 03 09:07:51 crc kubenswrapper[4576]: E1203 09:07:51.746057 4576 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 03 09:07:51 crc kubenswrapper[4576]: E1203 09:07:51.746097 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/446e8b7b-0e54-4b76-b0c7-56ec1f779499-metrics-certs podName:446e8b7b-0e54-4b76-b0c7-56ec1f779499 nodeName:}" failed. No retries permitted until 2025-12-03 09:07:52.246085227 +0000 UTC m=+1679.632062311 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/446e8b7b-0e54-4b76-b0c7-56ec1f779499-metrics-certs") pod "openstack-operator-controller-manager-665947b8b5-hr855" (UID: "446e8b7b-0e54-4b76-b0c7-56ec1f779499") : secret "metrics-server-cert" not found Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.772149 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qv7t2"] Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.773239 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qv7t2" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.779649 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-cs9v4" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.782723 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qv7t2"] Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.794798 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7k7vv\" (UniqueName: \"kubernetes.io/projected/446e8b7b-0e54-4b76-b0c7-56ec1f779499-kube-api-access-7k7vv\") pod \"openstack-operator-controller-manager-665947b8b5-hr855\" (UID: \"446e8b7b-0e54-4b76-b0c7-56ec1f779499\") " pod="openstack-operators/openstack-operator-controller-manager-665947b8b5-hr855" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.845934 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-4c8wv" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.846513 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hq8nk\" (UniqueName: \"kubernetes.io/projected/e72f6251-8004-43cc-9bf2-80bc4b8d4431-kube-api-access-hq8nk\") pod \"rabbitmq-cluster-operator-manager-668c99d594-qv7t2\" (UID: \"e72f6251-8004-43cc-9bf2-80bc4b8d4431\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qv7t2" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.891048 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-4mznn"] Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.919643 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-8g876"] Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.938617 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-n78hw"] Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.957341 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hq8nk\" (UniqueName: \"kubernetes.io/projected/e72f6251-8004-43cc-9bf2-80bc4b8d4431-kube-api-access-hq8nk\") pod \"rabbitmq-cluster-operator-manager-668c99d594-qv7t2\" (UID: \"e72f6251-8004-43cc-9bf2-80bc4b8d4431\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qv7t2" Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.969002 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-4mznn" 
event={"ID":"3ea8e44c-f5b3-4b92-94ed-04954472481c","Type":"ContainerStarted","Data":"d3798ad16ed449075d14c08adc303ce28fe5916e52852c166f4cfb744ff2cca3"} Dec 03 09:07:51 crc kubenswrapper[4576]: I1203 09:07:51.990013 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hq8nk\" (UniqueName: \"kubernetes.io/projected/e72f6251-8004-43cc-9bf2-80bc4b8d4431-kube-api-access-hq8nk\") pod \"rabbitmq-cluster-operator-manager-668c99d594-qv7t2\" (UID: \"e72f6251-8004-43cc-9bf2-80bc4b8d4431\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qv7t2" Dec 03 09:07:52 crc kubenswrapper[4576]: I1203 09:07:52.135515 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qv7t2" Dec 03 09:07:52 crc kubenswrapper[4576]: I1203 09:07:52.268662 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/446e8b7b-0e54-4b76-b0c7-56ec1f779499-webhook-certs\") pod \"openstack-operator-controller-manager-665947b8b5-hr855\" (UID: \"446e8b7b-0e54-4b76-b0c7-56ec1f779499\") " pod="openstack-operators/openstack-operator-controller-manager-665947b8b5-hr855" Dec 03 09:07:52 crc kubenswrapper[4576]: I1203 09:07:52.268985 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f923e423-dcca-499c-8bf1-1c5d4288f20c-cert\") pod \"infra-operator-controller-manager-57548d458d-4tr5z\" (UID: \"f923e423-dcca-499c-8bf1-1c5d4288f20c\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-4tr5z" Dec 03 09:07:52 crc kubenswrapper[4576]: I1203 09:07:52.269023 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/446e8b7b-0e54-4b76-b0c7-56ec1f779499-metrics-certs\") pod \"openstack-operator-controller-manager-665947b8b5-hr855\" (UID: \"446e8b7b-0e54-4b76-b0c7-56ec1f779499\") " pod="openstack-operators/openstack-operator-controller-manager-665947b8b5-hr855" Dec 03 09:07:52 crc kubenswrapper[4576]: E1203 09:07:52.269168 4576 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 03 09:07:52 crc kubenswrapper[4576]: E1203 09:07:52.269233 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/446e8b7b-0e54-4b76-b0c7-56ec1f779499-metrics-certs podName:446e8b7b-0e54-4b76-b0c7-56ec1f779499 nodeName:}" failed. No retries permitted until 2025-12-03 09:07:53.269212982 +0000 UTC m=+1680.655189966 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/446e8b7b-0e54-4b76-b0c7-56ec1f779499-metrics-certs") pod "openstack-operator-controller-manager-665947b8b5-hr855" (UID: "446e8b7b-0e54-4b76-b0c7-56ec1f779499") : secret "metrics-server-cert" not found Dec 03 09:07:52 crc kubenswrapper[4576]: E1203 09:07:52.269640 4576 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 03 09:07:52 crc kubenswrapper[4576]: E1203 09:07:52.269682 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/446e8b7b-0e54-4b76-b0c7-56ec1f779499-webhook-certs podName:446e8b7b-0e54-4b76-b0c7-56ec1f779499 nodeName:}" failed. No retries permitted until 2025-12-03 09:07:53.269671274 +0000 UTC m=+1680.655648258 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/446e8b7b-0e54-4b76-b0c7-56ec1f779499-webhook-certs") pod "openstack-operator-controller-manager-665947b8b5-hr855" (UID: "446e8b7b-0e54-4b76-b0c7-56ec1f779499") : secret "webhook-server-cert" not found Dec 03 09:07:52 crc kubenswrapper[4576]: E1203 09:07:52.269735 4576 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 03 09:07:52 crc kubenswrapper[4576]: E1203 09:07:52.269763 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f923e423-dcca-499c-8bf1-1c5d4288f20c-cert podName:f923e423-dcca-499c-8bf1-1c5d4288f20c nodeName:}" failed. No retries permitted until 2025-12-03 09:07:54.269753296 +0000 UTC m=+1681.655730280 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/f923e423-dcca-499c-8bf1-1c5d4288f20c-cert") pod "infra-operator-controller-manager-57548d458d-4tr5z" (UID: "f923e423-dcca-499c-8bf1-1c5d4288f20c") : secret "infra-operator-webhook-server-cert" not found Dec 03 09:07:52 crc kubenswrapper[4576]: I1203 09:07:52.395675 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-wqx97"] Dec 03 09:07:52 crc kubenswrapper[4576]: I1203 09:07:52.451341 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-zghdb"] Dec 03 09:07:52 crc kubenswrapper[4576]: W1203 09:07:52.482634 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddf68290e_5853_4fce_903f_354ea9f740e1.slice/crio-2a36110a795b5890ed84ab949f75b1d6c9d1033e5ed3c725ca841ebfbebff2d1 WatchSource:0}: Error finding container 2a36110a795b5890ed84ab949f75b1d6c9d1033e5ed3c725ca841ebfbebff2d1: Status 404 returned error can't find the container with id 2a36110a795b5890ed84ab949f75b1d6c9d1033e5ed3c725ca841ebfbebff2d1 Dec 03 09:07:52 crc kubenswrapper[4576]: I1203 09:07:52.574390 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bad742c6-6ff9-4fe9-8a09-7d399b6d41de-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd42tjbm\" (UID: \"bad742c6-6ff9-4fe9-8a09-7d399b6d41de\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd42tjbm" Dec 03 09:07:52 crc kubenswrapper[4576]: E1203 09:07:52.574652 4576 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 09:07:52 crc kubenswrapper[4576]: E1203 09:07:52.574738 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bad742c6-6ff9-4fe9-8a09-7d399b6d41de-cert podName:bad742c6-6ff9-4fe9-8a09-7d399b6d41de nodeName:}" failed. No retries permitted until 2025-12-03 09:07:54.574714268 +0000 UTC m=+1681.960691252 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/bad742c6-6ff9-4fe9-8a09-7d399b6d41de-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd42tjbm" (UID: "bad742c6-6ff9-4fe9-8a09-7d399b6d41de") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 09:07:52 crc kubenswrapper[4576]: I1203 09:07:52.825748 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-nsggq"] Dec 03 09:07:52 crc kubenswrapper[4576]: I1203 09:07:52.857731 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-49hnv"] Dec 03 09:07:52 crc kubenswrapper[4576]: W1203 09:07:52.870747 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podac2346b5_8522_40bf_8083_15d06d8b9afd.slice/crio-b8aa49b4a25ec60d14e7cb3899dec697817aad46b218dc69183063428d57e6f5 WatchSource:0}: Error finding container b8aa49b4a25ec60d14e7cb3899dec697817aad46b218dc69183063428d57e6f5: Status 404 returned error can't find the container with id b8aa49b4a25ec60d14e7cb3899dec697817aad46b218dc69183063428d57e6f5 Dec 03 09:07:52 crc kubenswrapper[4576]: I1203 09:07:52.987308 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-49hnv" event={"ID":"ac2346b5-8522-40bf-8083-15d06d8b9afd","Type":"ContainerStarted","Data":"b8aa49b4a25ec60d14e7cb3899dec697817aad46b218dc69183063428d57e6f5"} Dec 03 09:07:52 crc kubenswrapper[4576]: I1203 09:07:52.991759 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-zghdb" event={"ID":"df68290e-5853-4fce-903f-354ea9f740e1","Type":"ContainerStarted","Data":"2a36110a795b5890ed84ab949f75b1d6c9d1033e5ed3c725ca841ebfbebff2d1"} Dec 03 09:07:52 crc kubenswrapper[4576]: I1203 09:07:52.994719 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-8g876" event={"ID":"41505f0c-de81-41e5-b9e1-de8a17563b8d","Type":"ContainerStarted","Data":"93dd17d08d8ba9b7a427be1b420e8b96621759a81806a6d3c77a367021085f55"} Dec 03 09:07:52 crc kubenswrapper[4576]: I1203 09:07:52.999275 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-nsggq" event={"ID":"649283f6-ebcd-45a0-974f-e9c14138fa46","Type":"ContainerStarted","Data":"542ea794aa7ce645078c31bbf19c5019513b84b0881b76e6fef62441e8a51919"} Dec 03 09:07:53 crc kubenswrapper[4576]: I1203 09:07:53.002787 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-wqx97" event={"ID":"cce29053-f3ed-4dce-a362-c99b4aa31102","Type":"ContainerStarted","Data":"9cc918619d6bb74d15cede28b5b43c385d87ecd757173bb7c2f09b8370f67384"} Dec 03 09:07:53 crc kubenswrapper[4576]: I1203 09:07:53.006070 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-n78hw" event={"ID":"75840987-c6e3-45e6-912c-85771c498e41","Type":"ContainerStarted","Data":"8efd74cee7184110def2d1051dc8eb985ab7b611e2544ae1105e1be0640996c4"} Dec 03 09:07:53 crc kubenswrapper[4576]: I1203 09:07:53.198178 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-tdbjg"] Dec 03 09:07:53 crc 
kubenswrapper[4576]: W1203 09:07:53.215182 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8c321c82_4420_4b97_a16b_ce20c7ebcb15.slice/crio-db45f027cc8054e3395b91e12430149c94c3e0c6ad7d8de3d76f30d162dd13ac WatchSource:0}: Error finding container db45f027cc8054e3395b91e12430149c94c3e0c6ad7d8de3d76f30d162dd13ac: Status 404 returned error can't find the container with id db45f027cc8054e3395b91e12430149c94c3e0c6ad7d8de3d76f30d162dd13ac Dec 03 09:07:53 crc kubenswrapper[4576]: I1203 09:07:53.244658 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-jwqvx"] Dec 03 09:07:53 crc kubenswrapper[4576]: I1203 09:07:53.265616 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-g9t26"] Dec 03 09:07:53 crc kubenswrapper[4576]: W1203 09:07:53.279280 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7c74d79c_0100_40b9_a363_434b817b0504.slice/crio-45e1562b2ac765f6eee8a430d51a03109450a402a51c7d50642c7dfff1ce345d WatchSource:0}: Error finding container 45e1562b2ac765f6eee8a430d51a03109450a402a51c7d50642c7dfff1ce345d: Status 404 returned error can't find the container with id 45e1562b2ac765f6eee8a430d51a03109450a402a51c7d50642c7dfff1ce345d Dec 03 09:07:53 crc kubenswrapper[4576]: I1203 09:07:53.286843 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-7mg4g"] Dec 03 09:07:53 crc kubenswrapper[4576]: W1203 09:07:53.309163 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc4445b62_9884_4667_96cd_ce531cc798c4.slice/crio-645391b744cf315b6f97899aefe352de0a33867ddd8b1c7d43d4ce7993143caa WatchSource:0}: Error finding container 645391b744cf315b6f97899aefe352de0a33867ddd8b1c7d43d4ce7993143caa: Status 404 returned error can't find the container with id 645391b744cf315b6f97899aefe352de0a33867ddd8b1c7d43d4ce7993143caa Dec 03 09:07:53 crc kubenswrapper[4576]: I1203 09:07:53.317014 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/446e8b7b-0e54-4b76-b0c7-56ec1f779499-webhook-certs\") pod \"openstack-operator-controller-manager-665947b8b5-hr855\" (UID: \"446e8b7b-0e54-4b76-b0c7-56ec1f779499\") " pod="openstack-operators/openstack-operator-controller-manager-665947b8b5-hr855" Dec 03 09:07:53 crc kubenswrapper[4576]: I1203 09:07:53.317585 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/446e8b7b-0e54-4b76-b0c7-56ec1f779499-metrics-certs\") pod \"openstack-operator-controller-manager-665947b8b5-hr855\" (UID: \"446e8b7b-0e54-4b76-b0c7-56ec1f779499\") " pod="openstack-operators/openstack-operator-controller-manager-665947b8b5-hr855" Dec 03 09:07:53 crc kubenswrapper[4576]: E1203 09:07:53.317854 4576 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 03 09:07:53 crc kubenswrapper[4576]: E1203 09:07:53.317925 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/446e8b7b-0e54-4b76-b0c7-56ec1f779499-metrics-certs podName:446e8b7b-0e54-4b76-b0c7-56ec1f779499 nodeName:}" failed. 
No retries permitted until 2025-12-03 09:07:55.317900768 +0000 UTC m=+1682.703877752 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/446e8b7b-0e54-4b76-b0c7-56ec1f779499-metrics-certs") pod "openstack-operator-controller-manager-665947b8b5-hr855" (UID: "446e8b7b-0e54-4b76-b0c7-56ec1f779499") : secret "metrics-server-cert" not found Dec 03 09:07:53 crc kubenswrapper[4576]: E1203 09:07:53.317986 4576 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 03 09:07:53 crc kubenswrapper[4576]: E1203 09:07:53.318013 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/446e8b7b-0e54-4b76-b0c7-56ec1f779499-webhook-certs podName:446e8b7b-0e54-4b76-b0c7-56ec1f779499 nodeName:}" failed. No retries permitted until 2025-12-03 09:07:55.31800116 +0000 UTC m=+1682.703978144 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/446e8b7b-0e54-4b76-b0c7-56ec1f779499-webhook-certs") pod "openstack-operator-controller-manager-665947b8b5-hr855" (UID: "446e8b7b-0e54-4b76-b0c7-56ec1f779499") : secret "webhook-server-cert" not found Dec 03 09:07:53 crc kubenswrapper[4576]: I1203 09:07:53.342597 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-42rgl"] Dec 03 09:07:53 crc kubenswrapper[4576]: I1203 09:07:53.378748 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-rflv4"] Dec 03 09:07:53 crc kubenswrapper[4576]: I1203 09:07:53.387306 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-64r8s"] Dec 03 09:07:53 crc kubenswrapper[4576]: I1203 09:07:53.400951 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-w68kf"] Dec 03 09:07:53 crc kubenswrapper[4576]: E1203 09:07:53.402293 4576 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-772cf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-769dc69bc-4c8wv_openstack-operators(8a7accfb-c3a2-4f70-906e-b2a3545eb88a): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 09:07:53 crc kubenswrapper[4576]: E1203 09:07:53.402292 4576 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-h5m8j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-ndz7z_openstack-operators(3ce77fe1-0135-4043-9ebd-b7722db624d9): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 09:07:53 crc kubenswrapper[4576]: I1203 09:07:53.412730 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-vb4nv"] Dec 03 09:07:53 crc kubenswrapper[4576]: E1203 09:07:53.416917 4576 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-772cf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-769dc69bc-4c8wv_openstack-operators(8a7accfb-c3a2-4f70-906e-b2a3545eb88a): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 09:07:53 crc kubenswrapper[4576]: E1203 09:07:53.416976 4576 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 
0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-h5m8j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-ndz7z_openstack-operators(3ce77fe1-0135-4043-9ebd-b7722db624d9): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 09:07:53 crc kubenswrapper[4576]: W1203 09:07:53.417539 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7d654424_85f1_4848_93f3_abb64297ce3b.slice/crio-a75cc4271e93a938fe7242c360d45aea2ad5526ee9a7886356144b32f9f9e74c WatchSource:0}: Error finding container a75cc4271e93a938fe7242c360d45aea2ad5526ee9a7886356144b32f9f9e74c: Status 404 returned error can't find the container with id a75cc4271e93a938fe7242c360d45aea2ad5526ee9a7886356144b32f9f9e74c Dec 03 09:07:53 crc kubenswrapper[4576]: E1203 09:07:53.419438 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-ndz7z" podUID="3ce77fe1-0135-4043-9ebd-b7722db624d9" Dec 03 09:07:53 crc kubenswrapper[4576]: E1203 09:07:53.421499 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-4c8wv" podUID="8a7accfb-c3a2-4f70-906e-b2a3545eb88a" Dec 03 09:07:53 crc kubenswrapper[4576]: E1203 09:07:53.427553 4576 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mmpvq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-vb4nv_openstack-operators(7d654424-85f1-4848-93f3-abb64297ce3b): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 09:07:53 crc kubenswrapper[4576]: I1203 09:07:53.427871 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-ndz7z"] Dec 03 09:07:53 crc kubenswrapper[4576]: E1203 09:07:53.429956 4576 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mmpvq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-vb4nv_openstack-operators(7d654424-85f1-4848-93f3-abb64297ce3b): ErrImagePull: pull QPS exceeded" 
logger="UnhandledError" Dec 03 09:07:53 crc kubenswrapper[4576]: E1203 09:07:53.431410 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-vb4nv" podUID="7d654424-85f1-4848-93f3-abb64297ce3b" Dec 03 09:07:53 crc kubenswrapper[4576]: I1203 09:07:53.436659 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-6mlkp"] Dec 03 09:07:53 crc kubenswrapper[4576]: I1203 09:07:53.464131 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qv7t2"] Dec 03 09:07:53 crc kubenswrapper[4576]: I1203 09:07:53.473287 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-4c8wv"] Dec 03 09:07:54 crc kubenswrapper[4576]: I1203 09:07:54.039850 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-6mlkp" event={"ID":"f6e0d66d-6dc0-461c-a5c5-8a1060b6b164","Type":"ContainerStarted","Data":"1ee355658f742031d84e0c0b55b1bb155f64727b251053829670f3941c85c716"} Dec 03 09:07:54 crc kubenswrapper[4576]: I1203 09:07:54.071332 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-g9t26" event={"ID":"c4445b62-9884-4667-96cd-ce531cc798c4","Type":"ContainerStarted","Data":"645391b744cf315b6f97899aefe352de0a33867ddd8b1c7d43d4ce7993143caa"} Dec 03 09:07:54 crc kubenswrapper[4576]: I1203 09:07:54.071374 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-vb4nv" event={"ID":"7d654424-85f1-4848-93f3-abb64297ce3b","Type":"ContainerStarted","Data":"a75cc4271e93a938fe7242c360d45aea2ad5526ee9a7886356144b32f9f9e74c"} Dec 03 09:07:54 crc kubenswrapper[4576]: I1203 09:07:54.078148 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-ndz7z" event={"ID":"3ce77fe1-0135-4043-9ebd-b7722db624d9","Type":"ContainerStarted","Data":"51b2d53342344f2474083ad1b2643bd108280b07f55d88a65458ba1d71fcd3eb"} Dec 03 09:07:54 crc kubenswrapper[4576]: I1203 09:07:54.082344 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-4c8wv" event={"ID":"8a7accfb-c3a2-4f70-906e-b2a3545eb88a","Type":"ContainerStarted","Data":"54a9df62e78287d26de3d212d183f690999122b814c527bab3c5e69ba88e06ba"} Dec 03 09:07:54 crc kubenswrapper[4576]: I1203 09:07:54.085477 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-jwqvx" event={"ID":"7e7e8ece-f6be-41dc-be20-b82b844b4b83","Type":"ContainerStarted","Data":"9d7ba0d30a46cb7b662db7b6bd61ba176c89131453c4d3f102dd2530da7651b2"} Dec 03 09:07:54 crc kubenswrapper[4576]: E1203 09:07:54.092362 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with 
ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-vb4nv" podUID="7d654424-85f1-4848-93f3-abb64297ce3b" Dec 03 09:07:54 crc kubenswrapper[4576]: E1203 09:07:54.093605 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-ndz7z" podUID="3ce77fe1-0135-4043-9ebd-b7722db624d9" Dec 03 09:07:54 crc kubenswrapper[4576]: E1203 09:07:54.094381 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-4c8wv" podUID="8a7accfb-c3a2-4f70-906e-b2a3545eb88a" Dec 03 09:07:54 crc kubenswrapper[4576]: I1203 09:07:54.095127 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-7mg4g" event={"ID":"e697b8bb-b78f-4b0c-92e6-adde533c75b6","Type":"ContainerStarted","Data":"e9b6179965e702253d04109a7c0152fa657426448e852d0eb0872e55afaa4f42"} Dec 03 09:07:54 crc kubenswrapper[4576]: I1203 09:07:54.108989 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qv7t2" event={"ID":"e72f6251-8004-43cc-9bf2-80bc4b8d4431","Type":"ContainerStarted","Data":"df88fd8dc19259a6802644f545c28cfb3e24fe1a373284300b74e5a88379d8ae"} Dec 03 09:07:54 crc kubenswrapper[4576]: I1203 09:07:54.118810 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-rflv4" event={"ID":"5280c7ee-cf95-4f36-a074-247880784343","Type":"ContainerStarted","Data":"3cffb6169030f7c170c0a18fb9a2e84e3532547489e79c0927aebb0bb6c271e6"} Dec 03 09:07:54 crc kubenswrapper[4576]: I1203 09:07:54.121826 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-w68kf" event={"ID":"7c74d79c-0100-40b9-a363-434b817b0504","Type":"ContainerStarted","Data":"45e1562b2ac765f6eee8a430d51a03109450a402a51c7d50642c7dfff1ce345d"} Dec 03 09:07:54 crc kubenswrapper[4576]: I1203 09:07:54.126902 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-64r8s" event={"ID":"13d78877-8170-498d-bf0c-ab37fb799c83","Type":"ContainerStarted","Data":"469081e2baeb254a4e62238cefe08e21468157c4643752c6adfc6df25a79f37f"} Dec 03 09:07:54 crc kubenswrapper[4576]: I1203 09:07:54.130807 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-tdbjg" 
event={"ID":"8c321c82-4420-4b97-a16b-ce20c7ebcb15","Type":"ContainerStarted","Data":"db45f027cc8054e3395b91e12430149c94c3e0c6ad7d8de3d76f30d162dd13ac"} Dec 03 09:07:54 crc kubenswrapper[4576]: I1203 09:07:54.135181 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-42rgl" event={"ID":"a3246ab9-76a7-41dd-9fcd-57323766f4f2","Type":"ContainerStarted","Data":"7fdd0e6ff9b16757414513610ff258b8788a3e6510e85b9c36e7f0822fddad21"} Dec 03 09:07:54 crc kubenswrapper[4576]: I1203 09:07:54.336495 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f923e423-dcca-499c-8bf1-1c5d4288f20c-cert\") pod \"infra-operator-controller-manager-57548d458d-4tr5z\" (UID: \"f923e423-dcca-499c-8bf1-1c5d4288f20c\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-4tr5z" Dec 03 09:07:54 crc kubenswrapper[4576]: E1203 09:07:54.336639 4576 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 03 09:07:54 crc kubenswrapper[4576]: E1203 09:07:54.336720 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f923e423-dcca-499c-8bf1-1c5d4288f20c-cert podName:f923e423-dcca-499c-8bf1-1c5d4288f20c nodeName:}" failed. No retries permitted until 2025-12-03 09:07:58.336701965 +0000 UTC m=+1685.722678939 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/f923e423-dcca-499c-8bf1-1c5d4288f20c-cert") pod "infra-operator-controller-manager-57548d458d-4tr5z" (UID: "f923e423-dcca-499c-8bf1-1c5d4288f20c") : secret "infra-operator-webhook-server-cert" not found Dec 03 09:07:54 crc kubenswrapper[4576]: I1203 09:07:54.640108 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bad742c6-6ff9-4fe9-8a09-7d399b6d41de-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd42tjbm\" (UID: \"bad742c6-6ff9-4fe9-8a09-7d399b6d41de\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd42tjbm" Dec 03 09:07:54 crc kubenswrapper[4576]: E1203 09:07:54.640399 4576 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 09:07:54 crc kubenswrapper[4576]: E1203 09:07:54.640494 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bad742c6-6ff9-4fe9-8a09-7d399b6d41de-cert podName:bad742c6-6ff9-4fe9-8a09-7d399b6d41de nodeName:}" failed. No retries permitted until 2025-12-03 09:07:58.640470235 +0000 UTC m=+1686.026447269 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/bad742c6-6ff9-4fe9-8a09-7d399b6d41de-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd42tjbm" (UID: "bad742c6-6ff9-4fe9-8a09-7d399b6d41de") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 09:07:55 crc kubenswrapper[4576]: E1203 09:07:55.156433 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-vb4nv" podUID="7d654424-85f1-4848-93f3-abb64297ce3b" Dec 03 09:07:55 crc kubenswrapper[4576]: E1203 09:07:55.157596 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-ndz7z" podUID="3ce77fe1-0135-4043-9ebd-b7722db624d9" Dec 03 09:07:55 crc kubenswrapper[4576]: E1203 09:07:55.164626 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-4c8wv" podUID="8a7accfb-c3a2-4f70-906e-b2a3545eb88a" Dec 03 09:07:55 crc kubenswrapper[4576]: I1203 09:07:55.364413 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/446e8b7b-0e54-4b76-b0c7-56ec1f779499-webhook-certs\") pod \"openstack-operator-controller-manager-665947b8b5-hr855\" (UID: \"446e8b7b-0e54-4b76-b0c7-56ec1f779499\") " pod="openstack-operators/openstack-operator-controller-manager-665947b8b5-hr855" Dec 03 09:07:55 crc kubenswrapper[4576]: I1203 09:07:55.364476 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/446e8b7b-0e54-4b76-b0c7-56ec1f779499-metrics-certs\") pod \"openstack-operator-controller-manager-665947b8b5-hr855\" (UID: \"446e8b7b-0e54-4b76-b0c7-56ec1f779499\") " pod="openstack-operators/openstack-operator-controller-manager-665947b8b5-hr855" Dec 03 09:07:55 crc kubenswrapper[4576]: E1203 09:07:55.364646 4576 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 03 09:07:55 crc kubenswrapper[4576]: E1203 09:07:55.364704 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/446e8b7b-0e54-4b76-b0c7-56ec1f779499-metrics-certs 
podName:446e8b7b-0e54-4b76-b0c7-56ec1f779499 nodeName:}" failed. No retries permitted until 2025-12-03 09:07:59.36468693 +0000 UTC m=+1686.750663914 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/446e8b7b-0e54-4b76-b0c7-56ec1f779499-metrics-certs") pod "openstack-operator-controller-manager-665947b8b5-hr855" (UID: "446e8b7b-0e54-4b76-b0c7-56ec1f779499") : secret "metrics-server-cert" not found Dec 03 09:07:55 crc kubenswrapper[4576]: E1203 09:07:55.364697 4576 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 03 09:07:55 crc kubenswrapper[4576]: E1203 09:07:55.364810 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/446e8b7b-0e54-4b76-b0c7-56ec1f779499-webhook-certs podName:446e8b7b-0e54-4b76-b0c7-56ec1f779499 nodeName:}" failed. No retries permitted until 2025-12-03 09:07:59.364780643 +0000 UTC m=+1686.750757617 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/446e8b7b-0e54-4b76-b0c7-56ec1f779499-webhook-certs") pod "openstack-operator-controller-manager-665947b8b5-hr855" (UID: "446e8b7b-0e54-4b76-b0c7-56ec1f779499") : secret "webhook-server-cert" not found Dec 03 09:07:57 crc kubenswrapper[4576]: I1203 09:07:57.677151 4576 scope.go:117] "RemoveContainer" containerID="971057ff9d8dd948ea2baec3fb0fe13004ef1bdcb132acd7f101f25e8ebc2c91" Dec 03 09:07:57 crc kubenswrapper[4576]: E1203 09:07:57.677802 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:07:58 crc kubenswrapper[4576]: I1203 09:07:58.409249 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f923e423-dcca-499c-8bf1-1c5d4288f20c-cert\") pod \"infra-operator-controller-manager-57548d458d-4tr5z\" (UID: \"f923e423-dcca-499c-8bf1-1c5d4288f20c\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-4tr5z" Dec 03 09:07:58 crc kubenswrapper[4576]: E1203 09:07:58.409492 4576 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 03 09:07:58 crc kubenswrapper[4576]: E1203 09:07:58.409579 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f923e423-dcca-499c-8bf1-1c5d4288f20c-cert podName:f923e423-dcca-499c-8bf1-1c5d4288f20c nodeName:}" failed. No retries permitted until 2025-12-03 09:08:06.40955777 +0000 UTC m=+1693.795534754 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/f923e423-dcca-499c-8bf1-1c5d4288f20c-cert") pod "infra-operator-controller-manager-57548d458d-4tr5z" (UID: "f923e423-dcca-499c-8bf1-1c5d4288f20c") : secret "infra-operator-webhook-server-cert" not found Dec 03 09:07:58 crc kubenswrapper[4576]: I1203 09:07:58.712725 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bad742c6-6ff9-4fe9-8a09-7d399b6d41de-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd42tjbm\" (UID: \"bad742c6-6ff9-4fe9-8a09-7d399b6d41de\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd42tjbm" Dec 03 09:07:58 crc kubenswrapper[4576]: E1203 09:07:58.712946 4576 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 09:07:58 crc kubenswrapper[4576]: E1203 09:07:58.713013 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bad742c6-6ff9-4fe9-8a09-7d399b6d41de-cert podName:bad742c6-6ff9-4fe9-8a09-7d399b6d41de nodeName:}" failed. No retries permitted until 2025-12-03 09:08:06.712993291 +0000 UTC m=+1694.098970275 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/bad742c6-6ff9-4fe9-8a09-7d399b6d41de-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd42tjbm" (UID: "bad742c6-6ff9-4fe9-8a09-7d399b6d41de") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 09:07:59 crc kubenswrapper[4576]: I1203 09:07:59.421846 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/446e8b7b-0e54-4b76-b0c7-56ec1f779499-metrics-certs\") pod \"openstack-operator-controller-manager-665947b8b5-hr855\" (UID: \"446e8b7b-0e54-4b76-b0c7-56ec1f779499\") " pod="openstack-operators/openstack-operator-controller-manager-665947b8b5-hr855" Dec 03 09:07:59 crc kubenswrapper[4576]: E1203 09:07:59.422044 4576 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 03 09:07:59 crc kubenswrapper[4576]: I1203 09:07:59.422225 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/446e8b7b-0e54-4b76-b0c7-56ec1f779499-webhook-certs\") pod \"openstack-operator-controller-manager-665947b8b5-hr855\" (UID: \"446e8b7b-0e54-4b76-b0c7-56ec1f779499\") " pod="openstack-operators/openstack-operator-controller-manager-665947b8b5-hr855" Dec 03 09:07:59 crc kubenswrapper[4576]: E1203 09:07:59.422286 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/446e8b7b-0e54-4b76-b0c7-56ec1f779499-metrics-certs podName:446e8b7b-0e54-4b76-b0c7-56ec1f779499 nodeName:}" failed. No retries permitted until 2025-12-03 09:08:07.422262329 +0000 UTC m=+1694.808239323 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/446e8b7b-0e54-4b76-b0c7-56ec1f779499-metrics-certs") pod "openstack-operator-controller-manager-665947b8b5-hr855" (UID: "446e8b7b-0e54-4b76-b0c7-56ec1f779499") : secret "metrics-server-cert" not found Dec 03 09:07:59 crc kubenswrapper[4576]: E1203 09:07:59.422369 4576 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 03 09:07:59 crc kubenswrapper[4576]: E1203 09:07:59.422452 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/446e8b7b-0e54-4b76-b0c7-56ec1f779499-webhook-certs podName:446e8b7b-0e54-4b76-b0c7-56ec1f779499 nodeName:}" failed. No retries permitted until 2025-12-03 09:08:07.422431304 +0000 UTC m=+1694.808408288 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/446e8b7b-0e54-4b76-b0c7-56ec1f779499-webhook-certs") pod "openstack-operator-controller-manager-665947b8b5-hr855" (UID: "446e8b7b-0e54-4b76-b0c7-56ec1f779499") : secret "webhook-server-cert" not found Dec 03 09:08:06 crc kubenswrapper[4576]: I1203 09:08:06.413769 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f923e423-dcca-499c-8bf1-1c5d4288f20c-cert\") pod \"infra-operator-controller-manager-57548d458d-4tr5z\" (UID: \"f923e423-dcca-499c-8bf1-1c5d4288f20c\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-4tr5z" Dec 03 09:08:06 crc kubenswrapper[4576]: I1203 09:08:06.419984 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f923e423-dcca-499c-8bf1-1c5d4288f20c-cert\") pod \"infra-operator-controller-manager-57548d458d-4tr5z\" (UID: \"f923e423-dcca-499c-8bf1-1c5d4288f20c\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-4tr5z" Dec 03 09:08:06 crc kubenswrapper[4576]: E1203 09:08:06.470087 4576 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/mariadb-operator@sha256:600ca007e493d3af0fcc2ebac92e8da5efd2afe812b62d7d3d4dd0115bdf05d7" Dec 03 09:08:06 crc kubenswrapper[4576]: E1203 09:08:06.470571 4576 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/mariadb-operator@sha256:600ca007e493d3af0fcc2ebac92e8da5efd2afe812b62d7d3d4dd0115bdf05d7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-zqlfs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 
8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-56bbcc9d85-42rgl_openstack-operators(a3246ab9-76a7-41dd-9fcd-57323766f4f2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 09:08:06 crc kubenswrapper[4576]: I1203 09:08:06.622852 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-75wpn" Dec 03 09:08:06 crc kubenswrapper[4576]: I1203 09:08:06.631088 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-4tr5z" Dec 03 09:08:06 crc kubenswrapper[4576]: I1203 09:08:06.720315 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bad742c6-6ff9-4fe9-8a09-7d399b6d41de-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd42tjbm\" (UID: \"bad742c6-6ff9-4fe9-8a09-7d399b6d41de\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd42tjbm" Dec 03 09:08:06 crc kubenswrapper[4576]: I1203 09:08:06.742175 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bad742c6-6ff9-4fe9-8a09-7d399b6d41de-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd42tjbm\" (UID: \"bad742c6-6ff9-4fe9-8a09-7d399b6d41de\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd42tjbm" Dec 03 09:08:06 crc kubenswrapper[4576]: I1203 09:08:06.806169 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-msg45" Dec 03 09:08:06 crc kubenswrapper[4576]: I1203 09:08:06.814543 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd42tjbm" Dec 03 09:08:07 crc kubenswrapper[4576]: I1203 09:08:07.447702 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/446e8b7b-0e54-4b76-b0c7-56ec1f779499-webhook-certs\") pod \"openstack-operator-controller-manager-665947b8b5-hr855\" (UID: \"446e8b7b-0e54-4b76-b0c7-56ec1f779499\") " pod="openstack-operators/openstack-operator-controller-manager-665947b8b5-hr855" Dec 03 09:08:07 crc kubenswrapper[4576]: I1203 09:08:07.447840 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/446e8b7b-0e54-4b76-b0c7-56ec1f779499-metrics-certs\") pod \"openstack-operator-controller-manager-665947b8b5-hr855\" (UID: \"446e8b7b-0e54-4b76-b0c7-56ec1f779499\") " pod="openstack-operators/openstack-operator-controller-manager-665947b8b5-hr855" Dec 03 09:08:07 crc kubenswrapper[4576]: I1203 09:08:07.452331 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/446e8b7b-0e54-4b76-b0c7-56ec1f779499-webhook-certs\") pod \"openstack-operator-controller-manager-665947b8b5-hr855\" (UID: \"446e8b7b-0e54-4b76-b0c7-56ec1f779499\") " pod="openstack-operators/openstack-operator-controller-manager-665947b8b5-hr855" Dec 03 09:08:07 crc kubenswrapper[4576]: I1203 09:08:07.471057 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/446e8b7b-0e54-4b76-b0c7-56ec1f779499-metrics-certs\") pod \"openstack-operator-controller-manager-665947b8b5-hr855\" (UID: \"446e8b7b-0e54-4b76-b0c7-56ec1f779499\") " pod="openstack-operators/openstack-operator-controller-manager-665947b8b5-hr855" Dec 03 09:08:07 crc kubenswrapper[4576]: I1203 09:08:07.511036 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-bkz27" Dec 03 09:08:07 crc kubenswrapper[4576]: I1203 09:08:07.519536 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-665947b8b5-hr855" Dec 03 09:08:08 crc kubenswrapper[4576]: E1203 09:08:08.245846 4576 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/designate-operator@sha256:9f68d7bc8c6bce38f46dee8a8272d5365c49fe7b32b2af52e8ac884e212f3a85" Dec 03 09:08:08 crc kubenswrapper[4576]: E1203 09:08:08.246040 4576 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/designate-operator@sha256:9f68d7bc8c6bce38f46dee8a8272d5365c49fe7b32b2af52e8ac884e212f3a85,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-td6h7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod designate-operator-controller-manager-78b4bc895b-n78hw_openstack-operators(75840987-c6e3-45e6-912c-85771c498e41): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 09:08:08 crc kubenswrapper[4576]: I1203 09:08:08.677216 4576 scope.go:117] "RemoveContainer" containerID="971057ff9d8dd948ea2baec3fb0fe13004ef1bdcb132acd7f101f25e8ebc2c91" Dec 03 09:08:08 crc kubenswrapper[4576]: E1203 09:08:08.677576 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:08:09 crc kubenswrapper[4576]: E1203 09:08:09.019872 4576 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d" Dec 03 09:08:09 crc kubenswrapper[4576]: E1203 09:08:09.020762 4576 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jrmnv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-5f8c65bbfc-6mlkp_openstack-operators(f6e0d66d-6dc0-461c-a5c5-8a1060b6b164): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 09:08:10 crc kubenswrapper[4576]: E1203 09:08:10.962632 4576 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/heat-operator@sha256:c4abfc148600dfa85915f3dc911d988ea2335f26cb6b8d749fe79bfe53e5e429" Dec 03 09:08:10 crc kubenswrapper[4576]: E1203 09:08:10.962981 4576 kuberuntime_manager.go:1274] 
"Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/heat-operator@sha256:c4abfc148600dfa85915f3dc911d988ea2335f26cb6b8d749fe79bfe53e5e429,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-clm7n,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-operator-controller-manager-5f64f6f8bb-nsggq_openstack-operators(649283f6-ebcd-45a0-974f-e9c14138fa46): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 09:08:11 crc kubenswrapper[4576]: E1203 09:08:11.507271 4576 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f" Dec 03 09:08:11 crc kubenswrapper[4576]: E1203 09:08:11.507497 4576 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-zkg7j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-78f8948974-tdbjg_openstack-operators(8c321c82-4420-4b97-a16b-ce20c7ebcb15): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 09:08:15 crc kubenswrapper[4576]: E1203 09:08:15.603731 4576 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ironic-operator@sha256:0f523b7e2fa9e86fef986acf07d0c42d5658c475d565f11eaea926ebffcb6530" Dec 03 09:08:15 crc kubenswrapper[4576]: E1203 09:08:15.604173 4576 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ironic-operator@sha256:0f523b7e2fa9e86fef986acf07d0c42d5658c475d565f11eaea926ebffcb6530,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-49w9n,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ironic-operator-controller-manager-6c548fd776-49hnv_openstack-operators(ac2346b5-8522-40bf-8083-15d06d8b9afd): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 09:08:16 crc kubenswrapper[4576]: E1203 09:08:16.248690 4576 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557" Dec 03 09:08:16 crc kubenswrapper[4576]: E1203 09:08:16.249260 4576 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-z8d8t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-5fdfd5b6b5-jwqvx_openstack-operators(7e7e8ece-f6be-41dc-be20-b82b844b4b83): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 09:08:19 crc kubenswrapper[4576]: E1203 09:08:19.362956 4576 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168" Dec 03 09:08:19 crc kubenswrapper[4576]: E1203 09:08:19.363448 4576 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-rm99g,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-998648c74-rflv4_openstack-operators(5280c7ee-cf95-4f36-a074-247880784343): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 09:08:19 crc kubenswrapper[4576]: I1203 09:08:19.677789 4576 scope.go:117] "RemoveContainer" containerID="971057ff9d8dd948ea2baec3fb0fe13004ef1bdcb132acd7f101f25e8ebc2c91" Dec 03 09:08:19 crc kubenswrapper[4576]: E1203 09:08:19.678387 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:08:22 crc kubenswrapper[4576]: E1203 09:08:22.010736 4576 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/manila-operator@sha256:2e59cfbeefc3aff0bb0a6ae9ce2235129f5173c98dd5ee8dac229ad4895faea9" Dec 03 09:08:22 crc kubenswrapper[4576]: E1203 09:08:22.010999 4576 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/manila-operator@sha256:2e59cfbeefc3aff0bb0a6ae9ce2235129f5173c98dd5ee8dac229ad4895faea9,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-9pqhm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-7c79b5df47-w68kf_openstack-operators(7c74d79c-0100-40b9-a363-434b817b0504): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 09:08:22 crc kubenswrapper[4576]: E1203 09:08:22.685957 4576 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/cinder-operator@sha256:1d60701214b39cdb0fa70bbe5710f9b131139a9f4b482c2db4058a04daefb801" Dec 03 09:08:22 crc kubenswrapper[4576]: E1203 09:08:22.686880 4576 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/cinder-operator@sha256:1d60701214b39cdb0fa70bbe5710f9b131139a9f4b482c2db4058a04daefb801,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vt8qp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-operator-controller-manager-859b6ccc6-wqx97_openstack-operators(cce29053-f3ed-4dce-a362-c99b4aa31102): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 09:08:23 crc kubenswrapper[4576]: E1203 09:08:23.538996 4576 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670" Dec 03 09:08:23 crc kubenswrapper[4576]: E1203 09:08:23.539176 4576 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-djhn7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-7mg4g_openstack-operators(e697b8bb-b78f-4b0c-92e6-adde533c75b6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 09:08:24 crc kubenswrapper[4576]: E1203 09:08:24.180003 4576 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385" Dec 03 09:08:24 crc kubenswrapper[4576]: E1203 09:08:24.180425 4576 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mmpvq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-vb4nv_openstack-operators(7d654424-85f1-4848-93f3-abb64297ce3b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 09:08:25 crc kubenswrapper[4576]: E1203 09:08:25.851942 4576 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621" Dec 03 09:08:25 crc kubenswrapper[4576]: E1203 09:08:25.852767 4576 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-772cf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-769dc69bc-4c8wv_openstack-operators(8a7accfb-c3a2-4f70-906e-b2a3545eb88a): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 09:08:27 crc kubenswrapper[4576]: E1203 09:08:27.611063 4576 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7" Dec 03 09:08:27 crc kubenswrapper[4576]: E1203 09:08:27.611391 4576 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-fmj4x,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-7765d96ddf-64r8s_openstack-operators(13d78877-8170-498d-bf0c-ab37fb799c83): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 09:08:28 crc kubenswrapper[4576]: E1203 09:08:28.502950 4576 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2" Dec 03 09:08:28 crc kubenswrapper[4576]: E1203 09:08:28.503812 4576 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hq8nk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-qv7t2_openstack-operators(e72f6251-8004-43cc-9bf2-80bc4b8d4431): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" 
logger="UnhandledError" Dec 03 09:08:28 crc kubenswrapper[4576]: E1203 09:08:28.505326 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qv7t2" podUID="e72f6251-8004-43cc-9bf2-80bc4b8d4431" Dec 03 09:08:29 crc kubenswrapper[4576]: I1203 09:08:29.118509 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd42tjbm"] Dec 03 09:08:29 crc kubenswrapper[4576]: I1203 09:08:29.362052 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-665947b8b5-hr855"] Dec 03 09:08:29 crc kubenswrapper[4576]: I1203 09:08:29.383087 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-4tr5z"] Dec 03 09:08:29 crc kubenswrapper[4576]: W1203 09:08:29.499832 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod446e8b7b_0e54_4b76_b0c7_56ec1f779499.slice/crio-f553cce32d3c54c000547d9a64a43207ea0a9cb2d463c38b41b578500f0e9c40 WatchSource:0}: Error finding container f553cce32d3c54c000547d9a64a43207ea0a9cb2d463c38b41b578500f0e9c40: Status 404 returned error can't find the container with id f553cce32d3c54c000547d9a64a43207ea0a9cb2d463c38b41b578500f0e9c40 Dec 03 09:08:29 crc kubenswrapper[4576]: W1203 09:08:29.502356 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf923e423_dcca_499c_8bf1_1c5d4288f20c.slice/crio-a71d7391d9235186b50b6ae9f815180649d83b1fcc449b8267cc493ed505c3c5 WatchSource:0}: Error finding container a71d7391d9235186b50b6ae9f815180649d83b1fcc449b8267cc493ed505c3c5: Status 404 returned error can't find the container with id a71d7391d9235186b50b6ae9f815180649d83b1fcc449b8267cc493ed505c3c5 Dec 03 09:08:29 crc kubenswrapper[4576]: I1203 09:08:29.507087 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd42tjbm" event={"ID":"bad742c6-6ff9-4fe9-8a09-7d399b6d41de","Type":"ContainerStarted","Data":"2bf61b8b212d1a7720be55a72a571ffb5594ce8a068d04a92da973e60df41eb7"} Dec 03 09:08:29 crc kubenswrapper[4576]: E1203 09:08:29.509351 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qv7t2" podUID="e72f6251-8004-43cc-9bf2-80bc4b8d4431" Dec 03 09:08:30 crc kubenswrapper[4576]: I1203 09:08:30.514280 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-4tr5z" event={"ID":"f923e423-dcca-499c-8bf1-1c5d4288f20c","Type":"ContainerStarted","Data":"a71d7391d9235186b50b6ae9f815180649d83b1fcc449b8267cc493ed505c3c5"} Dec 03 09:08:30 crc kubenswrapper[4576]: I1203 09:08:30.538765 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-4mznn" 
event={"ID":"3ea8e44c-f5b3-4b92-94ed-04954472481c","Type":"ContainerStarted","Data":"ba35d5da9f814082ae64c737d16cc47064b072544101c407e5db231666d41112"} Dec 03 09:08:30 crc kubenswrapper[4576]: I1203 09:08:30.544891 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-g9t26" event={"ID":"c4445b62-9884-4667-96cd-ce531cc798c4","Type":"ContainerStarted","Data":"f902f48cc40f64d1355e5447eedb59ef8983a8820f64b6030255629993a44f4f"} Dec 03 09:08:30 crc kubenswrapper[4576]: I1203 09:08:30.551260 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-8g876" event={"ID":"41505f0c-de81-41e5-b9e1-de8a17563b8d","Type":"ContainerStarted","Data":"c9fd5d061d50c274281fb99073f7a14468935017853affa2843905cdce731898"} Dec 03 09:08:30 crc kubenswrapper[4576]: I1203 09:08:30.556066 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-665947b8b5-hr855" event={"ID":"446e8b7b-0e54-4b76-b0c7-56ec1f779499","Type":"ContainerStarted","Data":"f553cce32d3c54c000547d9a64a43207ea0a9cb2d463c38b41b578500f0e9c40"} Dec 03 09:08:30 crc kubenswrapper[4576]: I1203 09:08:30.561941 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-zghdb" event={"ID":"df68290e-5853-4fce-903f-354ea9f740e1","Type":"ContainerStarted","Data":"92188b76325377d684211ff02060d0daba48e3edd0cc07f10d474fcc26210c00"} Dec 03 09:08:33 crc kubenswrapper[4576]: I1203 09:08:33.686334 4576 scope.go:117] "RemoveContainer" containerID="971057ff9d8dd948ea2baec3fb0fe13004ef1bdcb132acd7f101f25e8ebc2c91" Dec 03 09:08:33 crc kubenswrapper[4576]: E1203 09:08:33.686913 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:08:35 crc kubenswrapper[4576]: I1203 09:08:35.615822 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-665947b8b5-hr855" event={"ID":"446e8b7b-0e54-4b76-b0c7-56ec1f779499","Type":"ContainerStarted","Data":"2b72aa60e4db78c367c82f727db54d6f89baa46f3be1774425975670e7c72671"} Dec 03 09:08:35 crc kubenswrapper[4576]: I1203 09:08:35.616194 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-665947b8b5-hr855" Dec 03 09:08:35 crc kubenswrapper[4576]: I1203 09:08:35.639649 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-ndz7z" event={"ID":"3ce77fe1-0135-4043-9ebd-b7722db624d9","Type":"ContainerStarted","Data":"fd9b973a005b3dd33915846a5458d2e13420a39fc7bfda8cd4efb281f3ba3b51"} Dec 03 09:08:35 crc kubenswrapper[4576]: I1203 09:08:35.695422 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-665947b8b5-hr855" podStartSLOduration=44.695381368 podStartE2EDuration="44.695381368s" podCreationTimestamp="2025-12-03 09:07:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 
00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:08:35.690858983 +0000 UTC m=+1723.076835977" watchObservedRunningTime="2025-12-03 09:08:35.695381368 +0000 UTC m=+1723.081358352" Dec 03 09:08:36 crc kubenswrapper[4576]: E1203 09:08:36.313595 4576 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 03 09:08:36 crc kubenswrapper[4576]: E1203 09:08:36.313850 4576 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-zqlfs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-56bbcc9d85-42rgl_openstack-operators(a3246ab9-76a7-41dd-9fcd-57323766f4f2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 09:08:36 crc kubenswrapper[4576]: E1203 09:08:36.315638 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-42rgl" podUID="a3246ab9-76a7-41dd-9fcd-57323766f4f2" Dec 03 09:08:38 crc kubenswrapper[4576]: E1203 09:08:38.498747 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/placement-operator-controller-manager-78f8948974-tdbjg" podUID="8c321c82-4420-4b97-a16b-ce20c7ebcb15" Dec 03 09:08:38 crc kubenswrapper[4576]: E1203 09:08:38.652303 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-n78hw" podUID="75840987-c6e3-45e6-912c-85771c498e41" Dec 03 09:08:38 crc kubenswrapper[4576]: 
I1203 09:08:38.669838 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-tdbjg" event={"ID":"8c321c82-4420-4b97-a16b-ce20c7ebcb15","Type":"ContainerStarted","Data":"ad14b5a2f3c1a05470eee44224392780d6663386976071b1f675e4aca6303601"} Dec 03 09:08:38 crc kubenswrapper[4576]: I1203 09:08:38.683716 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-42rgl" event={"ID":"a3246ab9-76a7-41dd-9fcd-57323766f4f2","Type":"ContainerStarted","Data":"d26858958759d25d6faba864f540a3d1c5c74b3971e16cece52ea423b93524ac"} Dec 03 09:08:38 crc kubenswrapper[4576]: I1203 09:08:38.690586 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-zghdb" event={"ID":"df68290e-5853-4fce-903f-354ea9f740e1","Type":"ContainerStarted","Data":"141eaf668fa5e94954d02de6632805f1b9fc097e94ca45c1818c47590bea9d9e"} Dec 03 09:08:38 crc kubenswrapper[4576]: I1203 09:08:38.691192 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-zghdb" Dec 03 09:08:38 crc kubenswrapper[4576]: E1203 09:08:38.695186 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-vb4nv" podUID="7d654424-85f1-4848-93f3-abb64297ce3b" Dec 03 09:08:38 crc kubenswrapper[4576]: I1203 09:08:38.700114 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-ndz7z" event={"ID":"3ce77fe1-0135-4043-9ebd-b7722db624d9","Type":"ContainerStarted","Data":"13cf5be7752d20db093fbd532d855e9b2a82075d45d686037319e7a0aec672f3"} Dec 03 09:08:38 crc kubenswrapper[4576]: I1203 09:08:38.700453 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5854674fcc-ndz7z" Dec 03 09:08:38 crc kubenswrapper[4576]: I1203 09:08:38.702909 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-zghdb" Dec 03 09:08:38 crc kubenswrapper[4576]: I1203 09:08:38.723655 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-n78hw" event={"ID":"75840987-c6e3-45e6-912c-85771c498e41","Type":"ContainerStarted","Data":"77da2b7b34b8a868db382aea7b45eb5dfdc74573e3679fb8860eca272f886a2c"} Dec 03 09:08:38 crc kubenswrapper[4576]: I1203 09:08:38.728369 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-zghdb" podStartSLOduration=3.385566714 podStartE2EDuration="48.728346146s" podCreationTimestamp="2025-12-03 09:07:50 +0000 UTC" firstStartedPulling="2025-12-03 09:07:52.497723975 +0000 UTC m=+1679.883700969" lastFinishedPulling="2025-12-03 09:08:37.840503407 +0000 UTC m=+1725.226480401" observedRunningTime="2025-12-03 09:08:38.722986119 +0000 UTC m=+1726.108963103" watchObservedRunningTime="2025-12-03 09:08:38.728346146 +0000 UTC m=+1726.114323130" Dec 03 09:08:38 crc kubenswrapper[4576]: I1203 09:08:38.736169 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-4mznn" event={"ID":"3ea8e44c-f5b3-4b92-94ed-04954472481c","Type":"ContainerStarted","Data":"5afb5e89678cb2c57375914a221c01c267254c72645e26bb79d0c774ec92be71"} Dec 03 09:08:38 crc kubenswrapper[4576]: I1203 09:08:38.737334 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-4mznn" Dec 03 09:08:38 crc kubenswrapper[4576]: I1203 09:08:38.746746 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-4mznn" Dec 03 09:08:38 crc kubenswrapper[4576]: I1203 09:08:38.752782 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-8g876" event={"ID":"41505f0c-de81-41e5-b9e1-de8a17563b8d","Type":"ContainerStarted","Data":"ab61ff4f6b9e58e96010f2cc81c376b5caded6258b62f2e5f7c5890f1d7b3b14"} Dec 03 09:08:38 crc kubenswrapper[4576]: I1203 09:08:38.753721 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-8g876" Dec 03 09:08:38 crc kubenswrapper[4576]: I1203 09:08:38.759504 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5854674fcc-ndz7z" podStartSLOduration=4.221899782 podStartE2EDuration="48.759491069s" podCreationTimestamp="2025-12-03 09:07:50 +0000 UTC" firstStartedPulling="2025-12-03 09:07:53.402105516 +0000 UTC m=+1680.788082490" lastFinishedPulling="2025-12-03 09:08:37.939696793 +0000 UTC m=+1725.325673777" observedRunningTime="2025-12-03 09:08:38.755852779 +0000 UTC m=+1726.141829763" watchObservedRunningTime="2025-12-03 09:08:38.759491069 +0000 UTC m=+1726.145468043" Dec 03 09:08:38 crc kubenswrapper[4576]: I1203 09:08:38.762845 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-8g876" Dec 03 09:08:38 crc kubenswrapper[4576]: I1203 09:08:38.854491 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-4mznn" podStartSLOduration=3.012642551 podStartE2EDuration="48.854473291s" podCreationTimestamp="2025-12-03 09:07:50 +0000 UTC" firstStartedPulling="2025-12-03 09:07:51.923733414 +0000 UTC m=+1679.309710398" lastFinishedPulling="2025-12-03 09:08:37.765564154 +0000 UTC m=+1725.151541138" observedRunningTime="2025-12-03 09:08:38.831630985 +0000 UTC m=+1726.217607969" watchObservedRunningTime="2025-12-03 09:08:38.854473291 +0000 UTC m=+1726.240450275" Dec 03 09:08:38 crc kubenswrapper[4576]: I1203 09:08:38.964787 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-8g876" podStartSLOduration=3.077441011 podStartE2EDuration="48.964762002s" podCreationTimestamp="2025-12-03 09:07:50 +0000 UTC" firstStartedPulling="2025-12-03 09:07:51.967129248 +0000 UTC m=+1679.353106232" lastFinishedPulling="2025-12-03 09:08:37.854450239 +0000 UTC m=+1725.240427223" observedRunningTime="2025-12-03 09:08:38.880854624 +0000 UTC m=+1726.266831608" watchObservedRunningTime="2025-12-03 09:08:38.964762002 +0000 UTC m=+1726.350738986" Dec 03 09:08:39 crc kubenswrapper[4576]: E1203 09:08:39.325695 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-6mlkp" podUID="f6e0d66d-6dc0-461c-a5c5-8a1060b6b164" Dec 03 09:08:39 crc kubenswrapper[4576]: E1203 09:08:39.336116 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-49hnv" podUID="ac2346b5-8522-40bf-8083-15d06d8b9afd" Dec 03 09:08:39 crc kubenswrapper[4576]: E1203 09:08:39.356874 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/octavia-operator-controller-manager-998648c74-rflv4" podUID="5280c7ee-cf95-4f36-a074-247880784343" Dec 03 09:08:39 crc kubenswrapper[4576]: E1203 09:08:39.357364 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-nsggq" podUID="649283f6-ebcd-45a0-974f-e9c14138fa46" Dec 03 09:08:39 crc kubenswrapper[4576]: E1203 09:08:39.367844 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-w68kf" podUID="7c74d79c-0100-40b9-a363-434b817b0504" Dec 03 09:08:39 crc kubenswrapper[4576]: E1203 09:08:39.369221 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-64r8s" podUID="13d78877-8170-498d-bf0c-ab37fb799c83" Dec 03 09:08:39 crc kubenswrapper[4576]: E1203 09:08:39.369370 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-7mg4g" podUID="e697b8bb-b78f-4b0c-92e6-adde533c75b6" Dec 03 09:08:39 crc kubenswrapper[4576]: E1203 09:08:39.372756 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-jwqvx" podUID="7e7e8ece-f6be-41dc-be20-b82b844b4b83" Dec 03 09:08:39 crc kubenswrapper[4576]: E1203 09:08:39.404213 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-4c8wv" podUID="8a7accfb-c3a2-4f70-906e-b2a3545eb88a" Dec 03 09:08:39 crc kubenswrapper[4576]: E1203 09:08:39.431087 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying 
config: context canceled\"" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-wqx97" podUID="cce29053-f3ed-4dce-a362-c99b4aa31102" Dec 03 09:08:39 crc kubenswrapper[4576]: I1203 09:08:39.772889 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-64r8s" event={"ID":"13d78877-8170-498d-bf0c-ab37fb799c83","Type":"ContainerStarted","Data":"3a409b4d48b3d18b49c978a4c4a02f0873573170a8a145714d2d826b8ac75de3"} Dec 03 09:08:39 crc kubenswrapper[4576]: I1203 09:08:39.780190 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-4c8wv" event={"ID":"8a7accfb-c3a2-4f70-906e-b2a3545eb88a","Type":"ContainerStarted","Data":"8c0a8d25981aa31ba40192036d5b719a3fbca74efd189b03a356f93148fa44cc"} Dec 03 09:08:39 crc kubenswrapper[4576]: E1203 09:08:39.785133 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-4c8wv" podUID="8a7accfb-c3a2-4f70-906e-b2a3545eb88a" Dec 03 09:08:39 crc kubenswrapper[4576]: I1203 09:08:39.789844 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-4tr5z" event={"ID":"f923e423-dcca-499c-8bf1-1c5d4288f20c","Type":"ContainerStarted","Data":"5cbf06c5b24d94bfed0033b791fb338e6c078666becba1f03b2996c945b24542"} Dec 03 09:08:39 crc kubenswrapper[4576]: I1203 09:08:39.796842 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-g9t26" event={"ID":"c4445b62-9884-4667-96cd-ce531cc798c4","Type":"ContainerStarted","Data":"97805042dafd252fd4de3a8d215bb22805ff8ecdb6013ffde45eedfd9ee8d3f3"} Dec 03 09:08:39 crc kubenswrapper[4576]: I1203 09:08:39.797987 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-g9t26" Dec 03 09:08:39 crc kubenswrapper[4576]: I1203 09:08:39.800763 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-g9t26" Dec 03 09:08:39 crc kubenswrapper[4576]: I1203 09:08:39.804940 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-wqx97" event={"ID":"cce29053-f3ed-4dce-a362-c99b4aa31102","Type":"ContainerStarted","Data":"5b4acfc8d366fe068c14064b206feea505f5cb3892319fef8eb322602db119fe"} Dec 03 09:08:39 crc kubenswrapper[4576]: I1203 09:08:39.813648 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-w68kf" event={"ID":"7c74d79c-0100-40b9-a363-434b817b0504","Type":"ContainerStarted","Data":"495036b1a2fc343ab8cd44c506de267bac2610a1784acca0b9c1238e0694b83b"} Dec 03 09:08:39 crc kubenswrapper[4576]: I1203 09:08:39.824630 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-49hnv" event={"ID":"ac2346b5-8522-40bf-8083-15d06d8b9afd","Type":"ContainerStarted","Data":"e9fe7e515be361986bd5e904ad0744f389afb767c6ee06f4370a92f8cc1bc09e"} Dec 03 09:08:39 crc kubenswrapper[4576]: I1203 09:08:39.829499 4576 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-7mg4g" event={"ID":"e697b8bb-b78f-4b0c-92e6-adde533c75b6","Type":"ContainerStarted","Data":"690bfe4789366cbd0847e9a18525d43e6122c3d6d87b016d6e2d6f65151891bb"} Dec 03 09:08:39 crc kubenswrapper[4576]: I1203 09:08:39.835495 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-vb4nv" event={"ID":"7d654424-85f1-4848-93f3-abb64297ce3b","Type":"ContainerStarted","Data":"8d5ffff728608ec972949bd966825976ac906fd7d3381534eceb154e6bed96dc"} Dec 03 09:08:39 crc kubenswrapper[4576]: E1203 09:08:39.838301 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-vb4nv" podUID="7d654424-85f1-4848-93f3-abb64297ce3b" Dec 03 09:08:39 crc kubenswrapper[4576]: I1203 09:08:39.843989 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd42tjbm" event={"ID":"bad742c6-6ff9-4fe9-8a09-7d399b6d41de","Type":"ContainerStarted","Data":"387f922baf5ff76e6a7ff01c175003c35dc11a7ae79900948fc3835952c09d6e"} Dec 03 09:08:39 crc kubenswrapper[4576]: I1203 09:08:39.846673 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-42rgl" event={"ID":"a3246ab9-76a7-41dd-9fcd-57323766f4f2","Type":"ContainerStarted","Data":"dc0cf7d2cc848640064102748e530839994ae36d6637babc7c84022866e753b8"} Dec 03 09:08:39 crc kubenswrapper[4576]: I1203 09:08:39.847420 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-42rgl" Dec 03 09:08:39 crc kubenswrapper[4576]: I1203 09:08:39.856489 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-6mlkp" event={"ID":"f6e0d66d-6dc0-461c-a5c5-8a1060b6b164","Type":"ContainerStarted","Data":"0721b67777252f8c671ca1323725525d3272ec490b287fdc997986f6f6b36576"} Dec 03 09:08:39 crc kubenswrapper[4576]: I1203 09:08:39.876876 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-rflv4" event={"ID":"5280c7ee-cf95-4f36-a074-247880784343","Type":"ContainerStarted","Data":"0b19832b95ca0ba0f8241ab316e6bc28af837c70cac1aadc5e7fe5452fbedf73"} Dec 03 09:08:39 crc kubenswrapper[4576]: I1203 09:08:39.895850 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-nsggq" event={"ID":"649283f6-ebcd-45a0-974f-e9c14138fa46","Type":"ContainerStarted","Data":"2b3c8b8f9670739379341665051e3e0088245cfac2ca8821a16d607c9610b249"} Dec 03 09:08:39 crc kubenswrapper[4576]: I1203 09:08:39.916175 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-jwqvx" event={"ID":"7e7e8ece-f6be-41dc-be20-b82b844b4b83","Type":"ContainerStarted","Data":"5d76ba27761a4f9afb8acae4db40a59ffccf1aa92d3ab9d7faddb3edf156b1b2"} Dec 03 09:08:39 crc kubenswrapper[4576]: I1203 09:08:39.921700 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/test-operator-controller-manager-5854674fcc-ndz7z" Dec 03 09:08:40 crc kubenswrapper[4576]: I1203 09:08:40.432103 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-g9t26" podStartSLOduration=5.924897867 podStartE2EDuration="50.432013302s" podCreationTimestamp="2025-12-03 09:07:50 +0000 UTC" firstStartedPulling="2025-12-03 09:07:53.352583077 +0000 UTC m=+1680.738560051" lastFinishedPulling="2025-12-03 09:08:37.859698502 +0000 UTC m=+1725.245675486" observedRunningTime="2025-12-03 09:08:40.426280865 +0000 UTC m=+1727.812257859" watchObservedRunningTime="2025-12-03 09:08:40.432013302 +0000 UTC m=+1727.817990286" Dec 03 09:08:40 crc kubenswrapper[4576]: I1203 09:08:40.530229 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-42rgl" podStartSLOduration=6.066220383 podStartE2EDuration="50.530205083s" podCreationTimestamp="2025-12-03 09:07:50 +0000 UTC" firstStartedPulling="2025-12-03 09:07:53.337979731 +0000 UTC m=+1680.723956715" lastFinishedPulling="2025-12-03 09:08:37.801964431 +0000 UTC m=+1725.187941415" observedRunningTime="2025-12-03 09:08:40.528983309 +0000 UTC m=+1727.914960293" watchObservedRunningTime="2025-12-03 09:08:40.530205083 +0000 UTC m=+1727.916182067" Dec 03 09:08:40 crc kubenswrapper[4576]: I1203 09:08:40.925896 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd42tjbm" event={"ID":"bad742c6-6ff9-4fe9-8a09-7d399b6d41de","Type":"ContainerStarted","Data":"7fe0a29d5f68fac00fb3b0411a72b4e3270a823685075c3dc659719568ef44ff"} Dec 03 09:08:41 crc kubenswrapper[4576]: I1203 09:08:41.951313 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-tdbjg" event={"ID":"8c321c82-4420-4b97-a16b-ce20c7ebcb15","Type":"ContainerStarted","Data":"6e4b6c01ba3d25acc3ee6db01e5f6887f7f1cd89a027156f892571de7ee171a6"} Dec 03 09:08:41 crc kubenswrapper[4576]: I1203 09:08:41.952515 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-78f8948974-tdbjg" Dec 03 09:08:41 crc kubenswrapper[4576]: I1203 09:08:41.959042 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-n78hw" event={"ID":"75840987-c6e3-45e6-912c-85771c498e41","Type":"ContainerStarted","Data":"f468ae82a87086bff201a638a3fe8f00a0c1e2d18f1696226dd86ea9f7f6cfe7"} Dec 03 09:08:41 crc kubenswrapper[4576]: I1203 09:08:41.959778 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-n78hw" Dec 03 09:08:41 crc kubenswrapper[4576]: I1203 09:08:41.962239 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-4tr5z" event={"ID":"f923e423-dcca-499c-8bf1-1c5d4288f20c","Type":"ContainerStarted","Data":"69c98cd6840786832defcdbe2b3833742533d7c29db6948f221cc234e6112f9a"} Dec 03 09:08:41 crc kubenswrapper[4576]: I1203 09:08:41.962768 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-57548d458d-4tr5z" Dec 03 09:08:41 crc kubenswrapper[4576]: I1203 09:08:41.966296 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-wqx97" event={"ID":"cce29053-f3ed-4dce-a362-c99b4aa31102","Type":"ContainerStarted","Data":"be4ce66bef4d7ae8ef6537760bf1c8bfec5ebeeaa11fa6851a61f33a7457366f"} Dec 03 09:08:41 crc kubenswrapper[4576]: I1203 09:08:41.982160 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-78f8948974-tdbjg" podStartSLOduration=5.737940021 podStartE2EDuration="51.982132894s" podCreationTimestamp="2025-12-03 09:07:50 +0000 UTC" firstStartedPulling="2025-12-03 09:07:53.218836777 +0000 UTC m=+1680.604813761" lastFinishedPulling="2025-12-03 09:08:39.46302965 +0000 UTC m=+1726.849006634" observedRunningTime="2025-12-03 09:08:41.97910575 +0000 UTC m=+1729.365082754" watchObservedRunningTime="2025-12-03 09:08:41.982132894 +0000 UTC m=+1729.368109888" Dec 03 09:08:42 crc kubenswrapper[4576]: I1203 09:08:42.006729 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-n78hw" podStartSLOduration=4.594692422 podStartE2EDuration="52.006712207s" podCreationTimestamp="2025-12-03 09:07:50 +0000 UTC" firstStartedPulling="2025-12-03 09:07:52.001319934 +0000 UTC m=+1679.387296918" lastFinishedPulling="2025-12-03 09:08:39.413339719 +0000 UTC m=+1726.799316703" observedRunningTime="2025-12-03 09:08:42.000939069 +0000 UTC m=+1729.386916063" watchObservedRunningTime="2025-12-03 09:08:42.006712207 +0000 UTC m=+1729.392689191" Dec 03 09:08:42 crc kubenswrapper[4576]: I1203 09:08:42.060177 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd42tjbm" podStartSLOduration=43.753428955 podStartE2EDuration="52.06014884s" podCreationTimestamp="2025-12-03 09:07:50 +0000 UTC" firstStartedPulling="2025-12-03 09:08:29.49580051 +0000 UTC m=+1716.881777494" lastFinishedPulling="2025-12-03 09:08:37.802520395 +0000 UTC m=+1725.188497379" observedRunningTime="2025-12-03 09:08:42.05428275 +0000 UTC m=+1729.440259744" watchObservedRunningTime="2025-12-03 09:08:42.06014884 +0000 UTC m=+1729.446125824" Dec 03 09:08:42 crc kubenswrapper[4576]: I1203 09:08:42.084412 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-57548d458d-4tr5z" podStartSLOduration=43.408870747 podStartE2EDuration="52.084387405s" podCreationTimestamp="2025-12-03 09:07:50 +0000 UTC" firstStartedPulling="2025-12-03 09:08:29.508356674 +0000 UTC m=+1716.894333658" lastFinishedPulling="2025-12-03 09:08:38.183873332 +0000 UTC m=+1725.569850316" observedRunningTime="2025-12-03 09:08:42.08057888 +0000 UTC m=+1729.466555884" watchObservedRunningTime="2025-12-03 09:08:42.084387405 +0000 UTC m=+1729.470364389" Dec 03 09:08:42 crc kubenswrapper[4576]: I1203 09:08:42.991257 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-wqx97" podStartSLOduration=3.816358468 podStartE2EDuration="52.991232414s" podCreationTimestamp="2025-12-03 09:07:50 +0000 UTC" firstStartedPulling="2025-12-03 09:07:52.459601014 +0000 UTC m=+1679.845577998" lastFinishedPulling="2025-12-03 09:08:41.63447494 +0000 UTC m=+1729.020451944" observedRunningTime="2025-12-03 09:08:42.988421207 +0000 UTC m=+1730.374398191" watchObservedRunningTime="2025-12-03 09:08:42.991232414 +0000 UTC m=+1730.377209398" Dec 03 09:08:43 
crc kubenswrapper[4576]: I1203 09:08:43.982058 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-w68kf" event={"ID":"7c74d79c-0100-40b9-a363-434b817b0504","Type":"ContainerStarted","Data":"08b782296e475d2c74a4a1dff4a2cd4dcd68bf24bdb398033a4133087c8cce43"} Dec 03 09:08:43 crc kubenswrapper[4576]: I1203 09:08:43.982384 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-w68kf" Dec 03 09:08:43 crc kubenswrapper[4576]: I1203 09:08:43.983928 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-nsggq" event={"ID":"649283f6-ebcd-45a0-974f-e9c14138fa46","Type":"ContainerStarted","Data":"23bce3c59e501d88efc26f0561477904532c10779043c5703a3b068a2c20d99f"} Dec 03 09:08:43 crc kubenswrapper[4576]: I1203 09:08:43.999923 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-w68kf" podStartSLOduration=5.415416948 podStartE2EDuration="53.999899604s" podCreationTimestamp="2025-12-03 09:07:50 +0000 UTC" firstStartedPulling="2025-12-03 09:07:53.300219599 +0000 UTC m=+1680.686196583" lastFinishedPulling="2025-12-03 09:08:41.884702255 +0000 UTC m=+1729.270679239" observedRunningTime="2025-12-03 09:08:43.997540999 +0000 UTC m=+1731.383518003" watchObservedRunningTime="2025-12-03 09:08:43.999899604 +0000 UTC m=+1731.385876588" Dec 03 09:08:44 crc kubenswrapper[4576]: I1203 09:08:44.041763 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-nsggq" podStartSLOduration=4.995580514 podStartE2EDuration="54.041740731s" podCreationTimestamp="2025-12-03 09:07:50 +0000 UTC" firstStartedPulling="2025-12-03 09:07:52.838591639 +0000 UTC m=+1680.224568623" lastFinishedPulling="2025-12-03 09:08:41.884751856 +0000 UTC m=+1729.270728840" observedRunningTime="2025-12-03 09:08:44.019666396 +0000 UTC m=+1731.405643420" watchObservedRunningTime="2025-12-03 09:08:44.041740731 +0000 UTC m=+1731.427717715" Dec 03 09:08:44 crc kubenswrapper[4576]: I1203 09:08:44.991143 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-64r8s" event={"ID":"13d78877-8170-498d-bf0c-ab37fb799c83","Type":"ContainerStarted","Data":"711159eb5ca9cb660655b9c3b187d1f8e79ec03e21749307cb22a8c00f99123d"} Dec 03 09:08:44 crc kubenswrapper[4576]: I1203 09:08:44.991225 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-64r8s" Dec 03 09:08:44 crc kubenswrapper[4576]: I1203 09:08:44.993039 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-49hnv" event={"ID":"ac2346b5-8522-40bf-8083-15d06d8b9afd","Type":"ContainerStarted","Data":"d35eab6d12abf67019b626674f43f5ada89be189896d03a43c7657818608eee4"} Dec 03 09:08:44 crc kubenswrapper[4576]: I1203 09:08:44.993450 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-49hnv" Dec 03 09:08:44 crc kubenswrapper[4576]: I1203 09:08:44.995473 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-jwqvx" 
event={"ID":"7e7e8ece-f6be-41dc-be20-b82b844b4b83","Type":"ContainerStarted","Data":"d369310c57798488093c2e9f3a5fbf28cd76becca495d893993b3862a63fac1c"} Dec 03 09:08:44 crc kubenswrapper[4576]: I1203 09:08:44.995587 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-jwqvx" Dec 03 09:08:44 crc kubenswrapper[4576]: I1203 09:08:44.997653 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-6mlkp" event={"ID":"f6e0d66d-6dc0-461c-a5c5-8a1060b6b164","Type":"ContainerStarted","Data":"5f0a1fcc6b0d42205de538115205a962ab841d5cf3bec2ada56feb499a1d0145"} Dec 03 09:08:44 crc kubenswrapper[4576]: I1203 09:08:44.997981 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-nsggq" Dec 03 09:08:44 crc kubenswrapper[4576]: I1203 09:08:44.998013 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-6mlkp" Dec 03 09:08:45 crc kubenswrapper[4576]: I1203 09:08:45.040438 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-64r8s" podStartSLOduration=4.300737871 podStartE2EDuration="55.040421106s" podCreationTimestamp="2025-12-03 09:07:50 +0000 UTC" firstStartedPulling="2025-12-03 09:07:53.401729896 +0000 UTC m=+1680.787706880" lastFinishedPulling="2025-12-03 09:08:44.141413131 +0000 UTC m=+1731.527390115" observedRunningTime="2025-12-03 09:08:45.016259804 +0000 UTC m=+1732.402236818" watchObservedRunningTime="2025-12-03 09:08:45.040421106 +0000 UTC m=+1732.426398090" Dec 03 09:08:45 crc kubenswrapper[4576]: I1203 09:08:45.043327 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-6mlkp" podStartSLOduration=4.303545818 podStartE2EDuration="55.043309815s" podCreationTimestamp="2025-12-03 09:07:50 +0000 UTC" firstStartedPulling="2025-12-03 09:07:53.401646204 +0000 UTC m=+1680.787623188" lastFinishedPulling="2025-12-03 09:08:44.141410201 +0000 UTC m=+1731.527387185" observedRunningTime="2025-12-03 09:08:45.038649768 +0000 UTC m=+1732.424626772" watchObservedRunningTime="2025-12-03 09:08:45.043309815 +0000 UTC m=+1732.429286799" Dec 03 09:08:45 crc kubenswrapper[4576]: I1203 09:08:45.062940 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-jwqvx" podStartSLOduration=4.262012399 podStartE2EDuration="55.062916522s" podCreationTimestamp="2025-12-03 09:07:50 +0000 UTC" firstStartedPulling="2025-12-03 09:07:53.344487327 +0000 UTC m=+1680.730464311" lastFinishedPulling="2025-12-03 09:08:44.14539145 +0000 UTC m=+1731.531368434" observedRunningTime="2025-12-03 09:08:45.059178319 +0000 UTC m=+1732.445155323" watchObservedRunningTime="2025-12-03 09:08:45.062916522 +0000 UTC m=+1732.448893506" Dec 03 09:08:45 crc kubenswrapper[4576]: I1203 09:08:45.093450 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-49hnv" podStartSLOduration=3.824650827 podStartE2EDuration="55.093423108s" podCreationTimestamp="2025-12-03 09:07:50 +0000 UTC" firstStartedPulling="2025-12-03 09:07:52.87265803 +0000 UTC m=+1680.258635014" lastFinishedPulling="2025-12-03 
09:08:44.141430311 +0000 UTC m=+1731.527407295" observedRunningTime="2025-12-03 09:08:45.084987777 +0000 UTC m=+1732.470964771" watchObservedRunningTime="2025-12-03 09:08:45.093423108 +0000 UTC m=+1732.479400092" Dec 03 09:08:46 crc kubenswrapper[4576]: I1203 09:08:46.005844 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-7mg4g" event={"ID":"e697b8bb-b78f-4b0c-92e6-adde533c75b6","Type":"ContainerStarted","Data":"7d344bb6b06288361f8889924fe42c8ebad398a8935440e545924ab329e2c3e3"} Dec 03 09:08:46 crc kubenswrapper[4576]: I1203 09:08:46.006212 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-7mg4g" Dec 03 09:08:46 crc kubenswrapper[4576]: I1203 09:08:46.007403 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qv7t2" event={"ID":"e72f6251-8004-43cc-9bf2-80bc4b8d4431","Type":"ContainerStarted","Data":"c4a6bf682f02d3b960e0e402325be288032569d6d30c84c5b3ef53454245cce6"} Dec 03 09:08:46 crc kubenswrapper[4576]: I1203 09:08:46.009473 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-rflv4" event={"ID":"5280c7ee-cf95-4f36-a074-247880784343","Type":"ContainerStarted","Data":"2e30b285d275d7dba041d613b8d9dc9056daa6782dbe000d96be57978eab4079"} Dec 03 09:08:46 crc kubenswrapper[4576]: I1203 09:08:46.031395 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-7mg4g" podStartSLOduration=4.148050211 podStartE2EDuration="56.03136988s" podCreationTimestamp="2025-12-03 09:07:50 +0000 UTC" firstStartedPulling="2025-12-03 09:07:53.274838782 +0000 UTC m=+1680.660815766" lastFinishedPulling="2025-12-03 09:08:45.158158451 +0000 UTC m=+1732.544135435" observedRunningTime="2025-12-03 09:08:46.027186106 +0000 UTC m=+1733.413163100" watchObservedRunningTime="2025-12-03 09:08:46.03136988 +0000 UTC m=+1733.417346874" Dec 03 09:08:46 crc kubenswrapper[4576]: I1203 09:08:46.055202 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qv7t2" podStartSLOduration=3.301127003 podStartE2EDuration="55.055178402s" podCreationTimestamp="2025-12-03 09:07:51 +0000 UTC" firstStartedPulling="2025-12-03 09:07:53.401981273 +0000 UTC m=+1680.787958257" lastFinishedPulling="2025-12-03 09:08:45.156032672 +0000 UTC m=+1732.542009656" observedRunningTime="2025-12-03 09:08:46.047129012 +0000 UTC m=+1733.433106006" watchObservedRunningTime="2025-12-03 09:08:46.055178402 +0000 UTC m=+1733.441155386" Dec 03 09:08:46 crc kubenswrapper[4576]: I1203 09:08:46.076586 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-998648c74-rflv4" podStartSLOduration=4.282677411 podStartE2EDuration="56.076556338s" podCreationTimestamp="2025-12-03 09:07:50 +0000 UTC" firstStartedPulling="2025-12-03 09:07:53.361065626 +0000 UTC m=+1680.747042610" lastFinishedPulling="2025-12-03 09:08:45.154944553 +0000 UTC m=+1732.540921537" observedRunningTime="2025-12-03 09:08:46.071214091 +0000 UTC m=+1733.457191095" watchObservedRunningTime="2025-12-03 09:08:46.076556338 +0000 UTC m=+1733.462533342" Dec 03 09:08:46 crc kubenswrapper[4576]: I1203 09:08:46.642958 4576 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-57548d458d-4tr5z" Dec 03 09:08:46 crc kubenswrapper[4576]: I1203 09:08:46.677761 4576 scope.go:117] "RemoveContainer" containerID="971057ff9d8dd948ea2baec3fb0fe13004ef1bdcb132acd7f101f25e8ebc2c91" Dec 03 09:08:46 crc kubenswrapper[4576]: E1203 09:08:46.678268 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:08:46 crc kubenswrapper[4576]: I1203 09:08:46.815329 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd42tjbm" Dec 03 09:08:46 crc kubenswrapper[4576]: I1203 09:08:46.828284 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd42tjbm" Dec 03 09:08:47 crc kubenswrapper[4576]: I1203 09:08:47.015800 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-998648c74-rflv4" Dec 03 09:08:47 crc kubenswrapper[4576]: I1203 09:08:47.527689 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-665947b8b5-hr855" Dec 03 09:08:50 crc kubenswrapper[4576]: I1203 09:08:50.477392 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-n78hw" Dec 03 09:08:50 crc kubenswrapper[4576]: I1203 09:08:50.566842 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-nsggq" Dec 03 09:08:50 crc kubenswrapper[4576]: I1203 09:08:50.727895 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-wqx97" Dec 03 09:08:50 crc kubenswrapper[4576]: I1203 09:08:50.731256 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-wqx97" Dec 03 09:08:50 crc kubenswrapper[4576]: I1203 09:08:50.773091 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-49hnv" Dec 03 09:08:50 crc kubenswrapper[4576]: I1203 09:08:50.912503 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-64r8s" Dec 03 09:08:51 crc kubenswrapper[4576]: I1203 09:08:51.097313 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-998648c74-rflv4" Dec 03 09:08:51 crc kubenswrapper[4576]: I1203 09:08:51.243286 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-w68kf" Dec 03 09:08:51 crc kubenswrapper[4576]: I1203 09:08:51.258828 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-42rgl" Dec 03 09:08:51 crc kubenswrapper[4576]: I1203 09:08:51.325977 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-jwqvx" Dec 03 09:08:51 crc kubenswrapper[4576]: I1203 09:08:51.326557 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-78f8948974-tdbjg" Dec 03 09:08:51 crc kubenswrapper[4576]: I1203 09:08:51.395373 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-7mg4g" Dec 03 09:08:51 crc kubenswrapper[4576]: I1203 09:08:51.444185 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-6mlkp" Dec 03 09:08:54 crc kubenswrapper[4576]: I1203 09:08:54.062091 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-4c8wv" event={"ID":"8a7accfb-c3a2-4f70-906e-b2a3545eb88a","Type":"ContainerStarted","Data":"ef7ec25f3614702670428a424c22daa7fd6de0067f924a02096013c6a9027062"} Dec 03 09:08:54 crc kubenswrapper[4576]: I1203 09:08:54.063803 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-4c8wv" Dec 03 09:08:54 crc kubenswrapper[4576]: I1203 09:08:54.064952 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-vb4nv" event={"ID":"7d654424-85f1-4848-93f3-abb64297ce3b","Type":"ContainerStarted","Data":"66ead58a2f9f9f3224d2079718e7adb35c185229e00978e4c95d8b144c40a2b1"} Dec 03 09:08:54 crc kubenswrapper[4576]: I1203 09:08:54.065257 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-vb4nv" Dec 03 09:08:54 crc kubenswrapper[4576]: I1203 09:08:54.088504 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-4c8wv" podStartSLOduration=3.913119354 podStartE2EDuration="1m4.088468999s" podCreationTimestamp="2025-12-03 09:07:50 +0000 UTC" firstStartedPulling="2025-12-03 09:07:53.402020054 +0000 UTC m=+1680.787997038" lastFinishedPulling="2025-12-03 09:08:53.577369699 +0000 UTC m=+1740.963346683" observedRunningTime="2025-12-03 09:08:54.078943458 +0000 UTC m=+1741.464920442" watchObservedRunningTime="2025-12-03 09:08:54.088468999 +0000 UTC m=+1741.474445983" Dec 03 09:08:54 crc kubenswrapper[4576]: I1203 09:08:54.118855 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-vb4nv" podStartSLOduration=4.134067848 podStartE2EDuration="1m4.118830541s" podCreationTimestamp="2025-12-03 09:07:50 +0000 UTC" firstStartedPulling="2025-12-03 09:07:53.427414211 +0000 UTC m=+1680.813391195" lastFinishedPulling="2025-12-03 09:08:53.412176904 +0000 UTC m=+1740.798153888" observedRunningTime="2025-12-03 09:08:54.111370966 +0000 UTC m=+1741.497347960" watchObservedRunningTime="2025-12-03 09:08:54.118830541 +0000 UTC m=+1741.504807525" Dec 03 09:09:00 crc kubenswrapper[4576]: I1203 09:09:00.677451 4576 scope.go:117] "RemoveContainer" 
containerID="971057ff9d8dd948ea2baec3fb0fe13004ef1bdcb132acd7f101f25e8ebc2c91" Dec 03 09:09:00 crc kubenswrapper[4576]: E1203 09:09:00.678108 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:09:01 crc kubenswrapper[4576]: I1203 09:09:01.493108 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-vb4nv" Dec 03 09:09:01 crc kubenswrapper[4576]: I1203 09:09:01.849549 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-4c8wv" Dec 03 09:09:11 crc kubenswrapper[4576]: I1203 09:09:11.677213 4576 scope.go:117] "RemoveContainer" containerID="971057ff9d8dd948ea2baec3fb0fe13004ef1bdcb132acd7f101f25e8ebc2c91" Dec 03 09:09:11 crc kubenswrapper[4576]: E1203 09:09:11.678034 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:09:25 crc kubenswrapper[4576]: I1203 09:09:25.489726 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-5vtbx"] Dec 03 09:09:25 crc kubenswrapper[4576]: I1203 09:09:25.494227 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-5vtbx" Dec 03 09:09:25 crc kubenswrapper[4576]: I1203 09:09:25.503728 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Dec 03 09:09:25 crc kubenswrapper[4576]: I1203 09:09:25.503980 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Dec 03 09:09:25 crc kubenswrapper[4576]: I1203 09:09:25.504108 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Dec 03 09:09:25 crc kubenswrapper[4576]: I1203 09:09:25.504334 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-pqztc" Dec 03 09:09:25 crc kubenswrapper[4576]: I1203 09:09:25.508051 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-5vtbx"] Dec 03 09:09:25 crc kubenswrapper[4576]: I1203 09:09:25.569434 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/08c0ad6c-3b00-4cb0-901c-b4ce0fb496cd-config\") pod \"dnsmasq-dns-675f4bcbfc-5vtbx\" (UID: \"08c0ad6c-3b00-4cb0-901c-b4ce0fb496cd\") " pod="openstack/dnsmasq-dns-675f4bcbfc-5vtbx" Dec 03 09:09:25 crc kubenswrapper[4576]: I1203 09:09:25.569494 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6ttqv\" (UniqueName: \"kubernetes.io/projected/08c0ad6c-3b00-4cb0-901c-b4ce0fb496cd-kube-api-access-6ttqv\") pod \"dnsmasq-dns-675f4bcbfc-5vtbx\" (UID: \"08c0ad6c-3b00-4cb0-901c-b4ce0fb496cd\") " pod="openstack/dnsmasq-dns-675f4bcbfc-5vtbx" Dec 03 09:09:25 crc kubenswrapper[4576]: I1203 09:09:25.593882 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-xgjxj"] Dec 03 09:09:25 crc kubenswrapper[4576]: I1203 09:09:25.595076 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-xgjxj" Dec 03 09:09:25 crc kubenswrapper[4576]: I1203 09:09:25.598009 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Dec 03 09:09:25 crc kubenswrapper[4576]: I1203 09:09:25.620056 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-xgjxj"] Dec 03 09:09:25 crc kubenswrapper[4576]: I1203 09:09:25.671005 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/08c0ad6c-3b00-4cb0-901c-b4ce0fb496cd-config\") pod \"dnsmasq-dns-675f4bcbfc-5vtbx\" (UID: \"08c0ad6c-3b00-4cb0-901c-b4ce0fb496cd\") " pod="openstack/dnsmasq-dns-675f4bcbfc-5vtbx" Dec 03 09:09:25 crc kubenswrapper[4576]: I1203 09:09:25.671053 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6ttqv\" (UniqueName: \"kubernetes.io/projected/08c0ad6c-3b00-4cb0-901c-b4ce0fb496cd-kube-api-access-6ttqv\") pod \"dnsmasq-dns-675f4bcbfc-5vtbx\" (UID: \"08c0ad6c-3b00-4cb0-901c-b4ce0fb496cd\") " pod="openstack/dnsmasq-dns-675f4bcbfc-5vtbx" Dec 03 09:09:25 crc kubenswrapper[4576]: I1203 09:09:25.671100 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/46e82bf5-baf2-4164-9844-c1f3e771402b-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-xgjxj\" (UID: \"46e82bf5-baf2-4164-9844-c1f3e771402b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-xgjxj" Dec 03 09:09:25 crc kubenswrapper[4576]: I1203 09:09:25.671134 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46e82bf5-baf2-4164-9844-c1f3e771402b-config\") pod \"dnsmasq-dns-78dd6ddcc-xgjxj\" (UID: \"46e82bf5-baf2-4164-9844-c1f3e771402b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-xgjxj" Dec 03 09:09:25 crc kubenswrapper[4576]: I1203 09:09:25.671164 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hnp2h\" (UniqueName: \"kubernetes.io/projected/46e82bf5-baf2-4164-9844-c1f3e771402b-kube-api-access-hnp2h\") pod \"dnsmasq-dns-78dd6ddcc-xgjxj\" (UID: \"46e82bf5-baf2-4164-9844-c1f3e771402b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-xgjxj" Dec 03 09:09:25 crc kubenswrapper[4576]: I1203 09:09:25.671989 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/08c0ad6c-3b00-4cb0-901c-b4ce0fb496cd-config\") pod \"dnsmasq-dns-675f4bcbfc-5vtbx\" (UID: \"08c0ad6c-3b00-4cb0-901c-b4ce0fb496cd\") " pod="openstack/dnsmasq-dns-675f4bcbfc-5vtbx" Dec 03 09:09:25 crc kubenswrapper[4576]: I1203 09:09:25.692091 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6ttqv\" (UniqueName: \"kubernetes.io/projected/08c0ad6c-3b00-4cb0-901c-b4ce0fb496cd-kube-api-access-6ttqv\") pod \"dnsmasq-dns-675f4bcbfc-5vtbx\" (UID: \"08c0ad6c-3b00-4cb0-901c-b4ce0fb496cd\") " pod="openstack/dnsmasq-dns-675f4bcbfc-5vtbx" Dec 03 09:09:25 crc kubenswrapper[4576]: I1203 09:09:25.772308 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/46e82bf5-baf2-4164-9844-c1f3e771402b-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-xgjxj\" (UID: \"46e82bf5-baf2-4164-9844-c1f3e771402b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-xgjxj" Dec 03 09:09:25 crc kubenswrapper[4576]: I1203 
09:09:25.772630 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46e82bf5-baf2-4164-9844-c1f3e771402b-config\") pod \"dnsmasq-dns-78dd6ddcc-xgjxj\" (UID: \"46e82bf5-baf2-4164-9844-c1f3e771402b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-xgjxj" Dec 03 09:09:25 crc kubenswrapper[4576]: I1203 09:09:25.772657 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hnp2h\" (UniqueName: \"kubernetes.io/projected/46e82bf5-baf2-4164-9844-c1f3e771402b-kube-api-access-hnp2h\") pod \"dnsmasq-dns-78dd6ddcc-xgjxj\" (UID: \"46e82bf5-baf2-4164-9844-c1f3e771402b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-xgjxj" Dec 03 09:09:25 crc kubenswrapper[4576]: I1203 09:09:25.773543 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46e82bf5-baf2-4164-9844-c1f3e771402b-config\") pod \"dnsmasq-dns-78dd6ddcc-xgjxj\" (UID: \"46e82bf5-baf2-4164-9844-c1f3e771402b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-xgjxj" Dec 03 09:09:25 crc kubenswrapper[4576]: I1203 09:09:25.773779 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/46e82bf5-baf2-4164-9844-c1f3e771402b-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-xgjxj\" (UID: \"46e82bf5-baf2-4164-9844-c1f3e771402b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-xgjxj" Dec 03 09:09:25 crc kubenswrapper[4576]: I1203 09:09:25.818505 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hnp2h\" (UniqueName: \"kubernetes.io/projected/46e82bf5-baf2-4164-9844-c1f3e771402b-kube-api-access-hnp2h\") pod \"dnsmasq-dns-78dd6ddcc-xgjxj\" (UID: \"46e82bf5-baf2-4164-9844-c1f3e771402b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-xgjxj" Dec 03 09:09:25 crc kubenswrapper[4576]: I1203 09:09:25.824972 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-5vtbx" Dec 03 09:09:25 crc kubenswrapper[4576]: I1203 09:09:25.912088 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-xgjxj" Dec 03 09:09:26 crc kubenswrapper[4576]: I1203 09:09:26.330036 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-5vtbx"] Dec 03 09:09:26 crc kubenswrapper[4576]: I1203 09:09:26.341137 4576 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 09:09:26 crc kubenswrapper[4576]: I1203 09:09:26.374621 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-xgjxj"] Dec 03 09:09:26 crc kubenswrapper[4576]: W1203 09:09:26.375733 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod46e82bf5_baf2_4164_9844_c1f3e771402b.slice/crio-adb41fea1e6221b7da7740ce09626ca9ca9ca6bf285fd0aa7bab38a8dbf20461 WatchSource:0}: Error finding container adb41fea1e6221b7da7740ce09626ca9ca9ca6bf285fd0aa7bab38a8dbf20461: Status 404 returned error can't find the container with id adb41fea1e6221b7da7740ce09626ca9ca9ca6bf285fd0aa7bab38a8dbf20461 Dec 03 09:09:26 crc kubenswrapper[4576]: I1203 09:09:26.677708 4576 scope.go:117] "RemoveContainer" containerID="971057ff9d8dd948ea2baec3fb0fe13004ef1bdcb132acd7f101f25e8ebc2c91" Dec 03 09:09:26 crc kubenswrapper[4576]: E1203 09:09:26.678158 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:09:27 crc kubenswrapper[4576]: I1203 09:09:27.354568 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-5vtbx" event={"ID":"08c0ad6c-3b00-4cb0-901c-b4ce0fb496cd","Type":"ContainerStarted","Data":"bf8b76d1354739b07f1c496d8178d5e8b2bdaf4acb6d6f016c3ef99ac8ad4aab"} Dec 03 09:09:27 crc kubenswrapper[4576]: I1203 09:09:27.357389 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-xgjxj" event={"ID":"46e82bf5-baf2-4164-9844-c1f3e771402b","Type":"ContainerStarted","Data":"adb41fea1e6221b7da7740ce09626ca9ca9ca6bf285fd0aa7bab38a8dbf20461"} Dec 03 09:09:28 crc kubenswrapper[4576]: I1203 09:09:28.640102 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-5vtbx"] Dec 03 09:09:28 crc kubenswrapper[4576]: I1203 09:09:28.662206 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-q744m"] Dec 03 09:09:28 crc kubenswrapper[4576]: I1203 09:09:28.671792 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-q744m" Dec 03 09:09:28 crc kubenswrapper[4576]: I1203 09:09:28.671882 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-q744m"] Dec 03 09:09:28 crc kubenswrapper[4576]: I1203 09:09:28.741648 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/382f950f-abff-4951-a3d1-d1fdbae63e30-config\") pod \"dnsmasq-dns-666b6646f7-q744m\" (UID: \"382f950f-abff-4951-a3d1-d1fdbae63e30\") " pod="openstack/dnsmasq-dns-666b6646f7-q744m" Dec 03 09:09:28 crc kubenswrapper[4576]: I1203 09:09:28.743600 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/382f950f-abff-4951-a3d1-d1fdbae63e30-dns-svc\") pod \"dnsmasq-dns-666b6646f7-q744m\" (UID: \"382f950f-abff-4951-a3d1-d1fdbae63e30\") " pod="openstack/dnsmasq-dns-666b6646f7-q744m" Dec 03 09:09:28 crc kubenswrapper[4576]: I1203 09:09:28.743714 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4vbg\" (UniqueName: \"kubernetes.io/projected/382f950f-abff-4951-a3d1-d1fdbae63e30-kube-api-access-q4vbg\") pod \"dnsmasq-dns-666b6646f7-q744m\" (UID: \"382f950f-abff-4951-a3d1-d1fdbae63e30\") " pod="openstack/dnsmasq-dns-666b6646f7-q744m" Dec 03 09:09:28 crc kubenswrapper[4576]: I1203 09:09:28.846412 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/382f950f-abff-4951-a3d1-d1fdbae63e30-dns-svc\") pod \"dnsmasq-dns-666b6646f7-q744m\" (UID: \"382f950f-abff-4951-a3d1-d1fdbae63e30\") " pod="openstack/dnsmasq-dns-666b6646f7-q744m" Dec 03 09:09:28 crc kubenswrapper[4576]: I1203 09:09:28.846482 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q4vbg\" (UniqueName: \"kubernetes.io/projected/382f950f-abff-4951-a3d1-d1fdbae63e30-kube-api-access-q4vbg\") pod \"dnsmasq-dns-666b6646f7-q744m\" (UID: \"382f950f-abff-4951-a3d1-d1fdbae63e30\") " pod="openstack/dnsmasq-dns-666b6646f7-q744m" Dec 03 09:09:28 crc kubenswrapper[4576]: I1203 09:09:28.846574 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/382f950f-abff-4951-a3d1-d1fdbae63e30-config\") pod \"dnsmasq-dns-666b6646f7-q744m\" (UID: \"382f950f-abff-4951-a3d1-d1fdbae63e30\") " pod="openstack/dnsmasq-dns-666b6646f7-q744m" Dec 03 09:09:28 crc kubenswrapper[4576]: I1203 09:09:28.847744 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/382f950f-abff-4951-a3d1-d1fdbae63e30-config\") pod \"dnsmasq-dns-666b6646f7-q744m\" (UID: \"382f950f-abff-4951-a3d1-d1fdbae63e30\") " pod="openstack/dnsmasq-dns-666b6646f7-q744m" Dec 03 09:09:28 crc kubenswrapper[4576]: I1203 09:09:28.848310 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/382f950f-abff-4951-a3d1-d1fdbae63e30-dns-svc\") pod \"dnsmasq-dns-666b6646f7-q744m\" (UID: \"382f950f-abff-4951-a3d1-d1fdbae63e30\") " pod="openstack/dnsmasq-dns-666b6646f7-q744m" Dec 03 09:09:28 crc kubenswrapper[4576]: I1203 09:09:28.876719 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q4vbg\" (UniqueName: 
\"kubernetes.io/projected/382f950f-abff-4951-a3d1-d1fdbae63e30-kube-api-access-q4vbg\") pod \"dnsmasq-dns-666b6646f7-q744m\" (UID: \"382f950f-abff-4951-a3d1-d1fdbae63e30\") " pod="openstack/dnsmasq-dns-666b6646f7-q744m" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.006249 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-q744m" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.031706 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-xgjxj"] Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.115865 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-gjhtr"] Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.116996 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-gjhtr" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.157941 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-95fng\" (UniqueName: \"kubernetes.io/projected/a2e06f57-3350-4eae-9f9b-d544fa698861-kube-api-access-95fng\") pod \"dnsmasq-dns-57d769cc4f-gjhtr\" (UID: \"a2e06f57-3350-4eae-9f9b-d544fa698861\") " pod="openstack/dnsmasq-dns-57d769cc4f-gjhtr" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.157996 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a2e06f57-3350-4eae-9f9b-d544fa698861-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-gjhtr\" (UID: \"a2e06f57-3350-4eae-9f9b-d544fa698861\") " pod="openstack/dnsmasq-dns-57d769cc4f-gjhtr" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.158043 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2e06f57-3350-4eae-9f9b-d544fa698861-config\") pod \"dnsmasq-dns-57d769cc4f-gjhtr\" (UID: \"a2e06f57-3350-4eae-9f9b-d544fa698861\") " pod="openstack/dnsmasq-dns-57d769cc4f-gjhtr" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.233470 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-gjhtr"] Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.268257 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-95fng\" (UniqueName: \"kubernetes.io/projected/a2e06f57-3350-4eae-9f9b-d544fa698861-kube-api-access-95fng\") pod \"dnsmasq-dns-57d769cc4f-gjhtr\" (UID: \"a2e06f57-3350-4eae-9f9b-d544fa698861\") " pod="openstack/dnsmasq-dns-57d769cc4f-gjhtr" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.268302 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a2e06f57-3350-4eae-9f9b-d544fa698861-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-gjhtr\" (UID: \"a2e06f57-3350-4eae-9f9b-d544fa698861\") " pod="openstack/dnsmasq-dns-57d769cc4f-gjhtr" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.268352 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2e06f57-3350-4eae-9f9b-d544fa698861-config\") pod \"dnsmasq-dns-57d769cc4f-gjhtr\" (UID: \"a2e06f57-3350-4eae-9f9b-d544fa698861\") " pod="openstack/dnsmasq-dns-57d769cc4f-gjhtr" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.269235 4576 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2e06f57-3350-4eae-9f9b-d544fa698861-config\") pod \"dnsmasq-dns-57d769cc4f-gjhtr\" (UID: \"a2e06f57-3350-4eae-9f9b-d544fa698861\") " pod="openstack/dnsmasq-dns-57d769cc4f-gjhtr" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.270035 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a2e06f57-3350-4eae-9f9b-d544fa698861-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-gjhtr\" (UID: \"a2e06f57-3350-4eae-9f9b-d544fa698861\") " pod="openstack/dnsmasq-dns-57d769cc4f-gjhtr" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.305633 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-95fng\" (UniqueName: \"kubernetes.io/projected/a2e06f57-3350-4eae-9f9b-d544fa698861-kube-api-access-95fng\") pod \"dnsmasq-dns-57d769cc4f-gjhtr\" (UID: \"a2e06f57-3350-4eae-9f9b-d544fa698861\") " pod="openstack/dnsmasq-dns-57d769cc4f-gjhtr" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.466822 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-gjhtr" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.844738 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.846944 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.853681 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.853993 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.854142 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.854357 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.854461 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.856069 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.856218 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-knqw9" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.861208 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.891916 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c20e6200-091a-47c3-afef-d1b4d9538309-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c20e6200-091a-47c3-afef-d1b4d9538309\") " pod="openstack/rabbitmq-server-0" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.891981 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c20e6200-091a-47c3-afef-d1b4d9538309-plugins-conf\") pod \"rabbitmq-server-0\" (UID: 
\"c20e6200-091a-47c3-afef-d1b4d9538309\") " pod="openstack/rabbitmq-server-0" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.892006 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"c20e6200-091a-47c3-afef-d1b4d9538309\") " pod="openstack/rabbitmq-server-0" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.892030 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c20e6200-091a-47c3-afef-d1b4d9538309-server-conf\") pod \"rabbitmq-server-0\" (UID: \"c20e6200-091a-47c3-afef-d1b4d9538309\") " pod="openstack/rabbitmq-server-0" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.892063 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c20e6200-091a-47c3-afef-d1b4d9538309-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c20e6200-091a-47c3-afef-d1b4d9538309\") " pod="openstack/rabbitmq-server-0" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.892081 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c20e6200-091a-47c3-afef-d1b4d9538309-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"c20e6200-091a-47c3-afef-d1b4d9538309\") " pod="openstack/rabbitmq-server-0" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.892108 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c20e6200-091a-47c3-afef-d1b4d9538309-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"c20e6200-091a-47c3-afef-d1b4d9538309\") " pod="openstack/rabbitmq-server-0" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.892127 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c20e6200-091a-47c3-afef-d1b4d9538309-config-data\") pod \"rabbitmq-server-0\" (UID: \"c20e6200-091a-47c3-afef-d1b4d9538309\") " pod="openstack/rabbitmq-server-0" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.892152 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thkt6\" (UniqueName: \"kubernetes.io/projected/c20e6200-091a-47c3-afef-d1b4d9538309-kube-api-access-thkt6\") pod \"rabbitmq-server-0\" (UID: \"c20e6200-091a-47c3-afef-d1b4d9538309\") " pod="openstack/rabbitmq-server-0" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.892195 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c20e6200-091a-47c3-afef-d1b4d9538309-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c20e6200-091a-47c3-afef-d1b4d9538309\") " pod="openstack/rabbitmq-server-0" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.892214 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c20e6200-091a-47c3-afef-d1b4d9538309-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c20e6200-091a-47c3-afef-d1b4d9538309\") " pod="openstack/rabbitmq-server-0" Dec 03 09:09:29 
crc kubenswrapper[4576]: I1203 09:09:29.943660 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-q744m"] Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.996949 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c20e6200-091a-47c3-afef-d1b4d9538309-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c20e6200-091a-47c3-afef-d1b4d9538309\") " pod="openstack/rabbitmq-server-0" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.996992 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c20e6200-091a-47c3-afef-d1b4d9538309-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c20e6200-091a-47c3-afef-d1b4d9538309\") " pod="openstack/rabbitmq-server-0" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.997046 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c20e6200-091a-47c3-afef-d1b4d9538309-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c20e6200-091a-47c3-afef-d1b4d9538309\") " pod="openstack/rabbitmq-server-0" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.997118 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c20e6200-091a-47c3-afef-d1b4d9538309-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"c20e6200-091a-47c3-afef-d1b4d9538309\") " pod="openstack/rabbitmq-server-0" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.997141 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"c20e6200-091a-47c3-afef-d1b4d9538309\") " pod="openstack/rabbitmq-server-0" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.997178 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c20e6200-091a-47c3-afef-d1b4d9538309-server-conf\") pod \"rabbitmq-server-0\" (UID: \"c20e6200-091a-47c3-afef-d1b4d9538309\") " pod="openstack/rabbitmq-server-0" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.997498 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c20e6200-091a-47c3-afef-d1b4d9538309-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c20e6200-091a-47c3-afef-d1b4d9538309\") " pod="openstack/rabbitmq-server-0" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.997555 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c20e6200-091a-47c3-afef-d1b4d9538309-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"c20e6200-091a-47c3-afef-d1b4d9538309\") " pod="openstack/rabbitmq-server-0" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.997580 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c20e6200-091a-47c3-afef-d1b4d9538309-config-data\") pod \"rabbitmq-server-0\" (UID: \"c20e6200-091a-47c3-afef-d1b4d9538309\") " pod="openstack/rabbitmq-server-0" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.997594 4576 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c20e6200-091a-47c3-afef-d1b4d9538309-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"c20e6200-091a-47c3-afef-d1b4d9538309\") " pod="openstack/rabbitmq-server-0" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.997634 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thkt6\" (UniqueName: \"kubernetes.io/projected/c20e6200-091a-47c3-afef-d1b4d9538309-kube-api-access-thkt6\") pod \"rabbitmq-server-0\" (UID: \"c20e6200-091a-47c3-afef-d1b4d9538309\") " pod="openstack/rabbitmq-server-0" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.997863 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c20e6200-091a-47c3-afef-d1b4d9538309-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c20e6200-091a-47c3-afef-d1b4d9538309\") " pod="openstack/rabbitmq-server-0" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.997938 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c20e6200-091a-47c3-afef-d1b4d9538309-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c20e6200-091a-47c3-afef-d1b4d9538309\") " pod="openstack/rabbitmq-server-0" Dec 03 09:09:29 crc kubenswrapper[4576]: I1203 09:09:29.999265 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c20e6200-091a-47c3-afef-d1b4d9538309-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"c20e6200-091a-47c3-afef-d1b4d9538309\") " pod="openstack/rabbitmq-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.000093 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c20e6200-091a-47c3-afef-d1b4d9538309-server-conf\") pod \"rabbitmq-server-0\" (UID: \"c20e6200-091a-47c3-afef-d1b4d9538309\") " pod="openstack/rabbitmq-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.000223 4576 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"c20e6200-091a-47c3-afef-d1b4d9538309\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/rabbitmq-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.001049 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c20e6200-091a-47c3-afef-d1b4d9538309-config-data\") pod \"rabbitmq-server-0\" (UID: \"c20e6200-091a-47c3-afef-d1b4d9538309\") " pod="openstack/rabbitmq-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.003307 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c20e6200-091a-47c3-afef-d1b4d9538309-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"c20e6200-091a-47c3-afef-d1b4d9538309\") " pod="openstack/rabbitmq-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.004351 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-gjhtr"] Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.013380 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: 
\"kubernetes.io/secret/c20e6200-091a-47c3-afef-d1b4d9538309-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c20e6200-091a-47c3-afef-d1b4d9538309\") " pod="openstack/rabbitmq-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.013505 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c20e6200-091a-47c3-afef-d1b4d9538309-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"c20e6200-091a-47c3-afef-d1b4d9538309\") " pod="openstack/rabbitmq-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.016083 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c20e6200-091a-47c3-afef-d1b4d9538309-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c20e6200-091a-47c3-afef-d1b4d9538309\") " pod="openstack/rabbitmq-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.018319 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thkt6\" (UniqueName: \"kubernetes.io/projected/c20e6200-091a-47c3-afef-d1b4d9538309-kube-api-access-thkt6\") pod \"rabbitmq-server-0\" (UID: \"c20e6200-091a-47c3-afef-d1b4d9538309\") " pod="openstack/rabbitmq-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.033712 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"c20e6200-091a-47c3-afef-d1b4d9538309\") " pod="openstack/rabbitmq-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.177546 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.243035 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.245873 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.249983 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.250020 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.250469 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.250674 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.250713 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.250778 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.250800 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.254918 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-x8bsx" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.302937 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6d6451b7-d2df-487c-afa3-3f1e56758ce4-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.303034 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6d6451b7-d2df-487c-afa3-3f1e56758ce4-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.303121 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.303156 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-spswx\" (UniqueName: \"kubernetes.io/projected/6d6451b7-d2df-487c-afa3-3f1e56758ce4-kube-api-access-spswx\") pod \"rabbitmq-cell1-server-0\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.303202 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6d6451b7-d2df-487c-afa3-3f1e56758ce4-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.303266 4576 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6d6451b7-d2df-487c-afa3-3f1e56758ce4-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.303305 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6d6451b7-d2df-487c-afa3-3f1e56758ce4-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.303331 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6d6451b7-d2df-487c-afa3-3f1e56758ce4-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.303358 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6d6451b7-d2df-487c-afa3-3f1e56758ce4-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.303386 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6d6451b7-d2df-487c-afa3-3f1e56758ce4-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.303426 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6d6451b7-d2df-487c-afa3-3f1e56758ce4-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.408406 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-spswx\" (UniqueName: \"kubernetes.io/projected/6d6451b7-d2df-487c-afa3-3f1e56758ce4-kube-api-access-spswx\") pod \"rabbitmq-cell1-server-0\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.408488 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6d6451b7-d2df-487c-afa3-3f1e56758ce4-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.408645 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6d6451b7-d2df-487c-afa3-3f1e56758ce4-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.408700 4576 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6d6451b7-d2df-487c-afa3-3f1e56758ce4-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.408729 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6d6451b7-d2df-487c-afa3-3f1e56758ce4-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.408762 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6d6451b7-d2df-487c-afa3-3f1e56758ce4-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.408798 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6d6451b7-d2df-487c-afa3-3f1e56758ce4-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.408800 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-q744m" event={"ID":"382f950f-abff-4951-a3d1-d1fdbae63e30","Type":"ContainerStarted","Data":"786ef965337b00927d6cc9ff8e9d8b1034c607ff772b20d3f5e2d77fe67d3357"} Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.408829 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6d6451b7-d2df-487c-afa3-3f1e56758ce4-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.408936 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6d6451b7-d2df-487c-afa3-3f1e56758ce4-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.408961 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6d6451b7-d2df-487c-afa3-3f1e56758ce4-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.409044 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.409393 4576 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") device mount path 
\"/mnt/openstack/pv05\"" pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.417279 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6d6451b7-d2df-487c-afa3-3f1e56758ce4-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.420263 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6d6451b7-d2df-487c-afa3-3f1e56758ce4-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.422320 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6d6451b7-d2df-487c-afa3-3f1e56758ce4-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.424312 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6d6451b7-d2df-487c-afa3-3f1e56758ce4-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.424487 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6d6451b7-d2df-487c-afa3-3f1e56758ce4-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.424836 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6d6451b7-d2df-487c-afa3-3f1e56758ce4-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.427993 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6d6451b7-d2df-487c-afa3-3f1e56758ce4-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.428519 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6d6451b7-d2df-487c-afa3-3f1e56758ce4-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.428925 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6d6451b7-d2df-487c-afa3-3f1e56758ce4-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.431956 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-57d769cc4f-gjhtr" event={"ID":"a2e06f57-3350-4eae-9f9b-d544fa698861","Type":"ContainerStarted","Data":"69c0d42daf62e0d9456f5b1adadf8bb77d6b5a6531fcaf855eae37f0541cb3fd"} Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.442357 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-spswx\" (UniqueName: \"kubernetes.io/projected/6d6451b7-d2df-487c-afa3-3f1e56758ce4-kube-api-access-spswx\") pod \"rabbitmq-cell1-server-0\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.449258 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.581368 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:09:30 crc kubenswrapper[4576]: I1203 09:09:30.751847 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 09:09:30 crc kubenswrapper[4576]: W1203 09:09:30.760332 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc20e6200_091a_47c3_afef_d1b4d9538309.slice/crio-297f604f3bb58820b9e9771e53c01494b83023596d6972b274bbe8c6fc988040 WatchSource:0}: Error finding container 297f604f3bb58820b9e9771e53c01494b83023596d6972b274bbe8c6fc988040: Status 404 returned error can't find the container with id 297f604f3bb58820b9e9771e53c01494b83023596d6972b274bbe8c6fc988040 Dec 03 09:09:31 crc kubenswrapper[4576]: I1203 09:09:31.180273 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 09:09:31 crc kubenswrapper[4576]: I1203 09:09:31.450037 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c20e6200-091a-47c3-afef-d1b4d9538309","Type":"ContainerStarted","Data":"297f604f3bb58820b9e9771e53c01494b83023596d6972b274bbe8c6fc988040"} Dec 03 09:09:31 crc kubenswrapper[4576]: I1203 09:09:31.692450 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Dec 03 09:09:31 crc kubenswrapper[4576]: I1203 09:09:31.693986 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Dec 03 09:09:31 crc kubenswrapper[4576]: I1203 09:09:31.696466 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-5t5k4" Dec 03 09:09:31 crc kubenswrapper[4576]: I1203 09:09:31.703898 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 03 09:09:31 crc kubenswrapper[4576]: I1203 09:09:31.726501 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Dec 03 09:09:31 crc kubenswrapper[4576]: I1203 09:09:31.726937 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Dec 03 09:09:31 crc kubenswrapper[4576]: I1203 09:09:31.728674 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Dec 03 09:09:31 crc kubenswrapper[4576]: I1203 09:09:31.731878 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Dec 03 09:09:31 crc kubenswrapper[4576]: I1203 09:09:31.855506 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"288e65b5-6608-4063-9996-eb5180ffbf0e\") " pod="openstack/openstack-galera-0" Dec 03 09:09:31 crc kubenswrapper[4576]: I1203 09:09:31.855603 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/288e65b5-6608-4063-9996-eb5180ffbf0e-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"288e65b5-6608-4063-9996-eb5180ffbf0e\") " pod="openstack/openstack-galera-0" Dec 03 09:09:31 crc kubenswrapper[4576]: I1203 09:09:31.855636 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/288e65b5-6608-4063-9996-eb5180ffbf0e-config-data-default\") pod \"openstack-galera-0\" (UID: \"288e65b5-6608-4063-9996-eb5180ffbf0e\") " pod="openstack/openstack-galera-0" Dec 03 09:09:31 crc kubenswrapper[4576]: I1203 09:09:31.855664 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/288e65b5-6608-4063-9996-eb5180ffbf0e-operator-scripts\") pod \"openstack-galera-0\" (UID: \"288e65b5-6608-4063-9996-eb5180ffbf0e\") " pod="openstack/openstack-galera-0" Dec 03 09:09:31 crc kubenswrapper[4576]: I1203 09:09:31.855685 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/288e65b5-6608-4063-9996-eb5180ffbf0e-kolla-config\") pod \"openstack-galera-0\" (UID: \"288e65b5-6608-4063-9996-eb5180ffbf0e\") " pod="openstack/openstack-galera-0" Dec 03 09:09:31 crc kubenswrapper[4576]: I1203 09:09:31.855700 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/288e65b5-6608-4063-9996-eb5180ffbf0e-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"288e65b5-6608-4063-9996-eb5180ffbf0e\") " pod="openstack/openstack-galera-0" Dec 03 09:09:31 crc kubenswrapper[4576]: I1203 09:09:31.855717 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/288e65b5-6608-4063-9996-eb5180ffbf0e-config-data-generated\") pod \"openstack-galera-0\" (UID: \"288e65b5-6608-4063-9996-eb5180ffbf0e\") " pod="openstack/openstack-galera-0" Dec 03 09:09:31 crc kubenswrapper[4576]: I1203 09:09:31.855768 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5zvhm\" (UniqueName: \"kubernetes.io/projected/288e65b5-6608-4063-9996-eb5180ffbf0e-kube-api-access-5zvhm\") pod \"openstack-galera-0\" (UID: \"288e65b5-6608-4063-9996-eb5180ffbf0e\") " pod="openstack/openstack-galera-0" Dec 03 09:09:31 crc kubenswrapper[4576]: I1203 09:09:31.958165 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/288e65b5-6608-4063-9996-eb5180ffbf0e-operator-scripts\") pod \"openstack-galera-0\" (UID: \"288e65b5-6608-4063-9996-eb5180ffbf0e\") " pod="openstack/openstack-galera-0" Dec 03 09:09:31 crc kubenswrapper[4576]: I1203 09:09:31.958256 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/288e65b5-6608-4063-9996-eb5180ffbf0e-kolla-config\") pod \"openstack-galera-0\" (UID: \"288e65b5-6608-4063-9996-eb5180ffbf0e\") " pod="openstack/openstack-galera-0" Dec 03 09:09:31 crc kubenswrapper[4576]: I1203 09:09:31.958346 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/288e65b5-6608-4063-9996-eb5180ffbf0e-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"288e65b5-6608-4063-9996-eb5180ffbf0e\") " pod="openstack/openstack-galera-0" Dec 03 09:09:31 crc kubenswrapper[4576]: I1203 09:09:31.964600 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/288e65b5-6608-4063-9996-eb5180ffbf0e-config-data-generated\") pod \"openstack-galera-0\" (UID: \"288e65b5-6608-4063-9996-eb5180ffbf0e\") " pod="openstack/openstack-galera-0" Dec 03 09:09:31 crc kubenswrapper[4576]: I1203 09:09:31.965247 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zvhm\" (UniqueName: \"kubernetes.io/projected/288e65b5-6608-4063-9996-eb5180ffbf0e-kube-api-access-5zvhm\") pod \"openstack-galera-0\" (UID: \"288e65b5-6608-4063-9996-eb5180ffbf0e\") " pod="openstack/openstack-galera-0" Dec 03 09:09:31 crc kubenswrapper[4576]: I1203 09:09:31.965476 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"288e65b5-6608-4063-9996-eb5180ffbf0e\") " pod="openstack/openstack-galera-0" Dec 03 09:09:31 crc kubenswrapper[4576]: I1203 09:09:31.965552 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/288e65b5-6608-4063-9996-eb5180ffbf0e-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"288e65b5-6608-4063-9996-eb5180ffbf0e\") " pod="openstack/openstack-galera-0" Dec 03 09:09:31 crc kubenswrapper[4576]: I1203 09:09:31.965577 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/288e65b5-6608-4063-9996-eb5180ffbf0e-config-data-default\") pod \"openstack-galera-0\" (UID: 
\"288e65b5-6608-4063-9996-eb5180ffbf0e\") " pod="openstack/openstack-galera-0" Dec 03 09:09:31 crc kubenswrapper[4576]: I1203 09:09:31.966453 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/288e65b5-6608-4063-9996-eb5180ffbf0e-config-data-generated\") pod \"openstack-galera-0\" (UID: \"288e65b5-6608-4063-9996-eb5180ffbf0e\") " pod="openstack/openstack-galera-0" Dec 03 09:09:31 crc kubenswrapper[4576]: I1203 09:09:31.969229 4576 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"288e65b5-6608-4063-9996-eb5180ffbf0e\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/openstack-galera-0" Dec 03 09:09:31 crc kubenswrapper[4576]: I1203 09:09:31.972554 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/288e65b5-6608-4063-9996-eb5180ffbf0e-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"288e65b5-6608-4063-9996-eb5180ffbf0e\") " pod="openstack/openstack-galera-0" Dec 03 09:09:31 crc kubenswrapper[4576]: I1203 09:09:31.973327 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/288e65b5-6608-4063-9996-eb5180ffbf0e-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"288e65b5-6608-4063-9996-eb5180ffbf0e\") " pod="openstack/openstack-galera-0" Dec 03 09:09:32 crc kubenswrapper[4576]: I1203 09:09:31.999913 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/288e65b5-6608-4063-9996-eb5180ffbf0e-config-data-default\") pod \"openstack-galera-0\" (UID: \"288e65b5-6608-4063-9996-eb5180ffbf0e\") " pod="openstack/openstack-galera-0" Dec 03 09:09:32 crc kubenswrapper[4576]: I1203 09:09:32.003124 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/288e65b5-6608-4063-9996-eb5180ffbf0e-kolla-config\") pod \"openstack-galera-0\" (UID: \"288e65b5-6608-4063-9996-eb5180ffbf0e\") " pod="openstack/openstack-galera-0" Dec 03 09:09:32 crc kubenswrapper[4576]: I1203 09:09:32.005188 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/288e65b5-6608-4063-9996-eb5180ffbf0e-operator-scripts\") pod \"openstack-galera-0\" (UID: \"288e65b5-6608-4063-9996-eb5180ffbf0e\") " pod="openstack/openstack-galera-0" Dec 03 09:09:32 crc kubenswrapper[4576]: I1203 09:09:32.016339 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"288e65b5-6608-4063-9996-eb5180ffbf0e\") " pod="openstack/openstack-galera-0" Dec 03 09:09:32 crc kubenswrapper[4576]: I1203 09:09:32.018241 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5zvhm\" (UniqueName: \"kubernetes.io/projected/288e65b5-6608-4063-9996-eb5180ffbf0e-kube-api-access-5zvhm\") pod \"openstack-galera-0\" (UID: \"288e65b5-6608-4063-9996-eb5180ffbf0e\") " pod="openstack/openstack-galera-0" Dec 03 09:09:32 crc kubenswrapper[4576]: I1203 09:09:32.054245 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Dec 03 09:09:32 crc kubenswrapper[4576]: I1203 09:09:32.869375 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 03 09:09:32 crc kubenswrapper[4576]: I1203 09:09:32.870971 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 03 09:09:32 crc kubenswrapper[4576]: I1203 09:09:32.876733 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-gzgf7" Dec 03 09:09:32 crc kubenswrapper[4576]: I1203 09:09:32.876996 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Dec 03 09:09:32 crc kubenswrapper[4576]: I1203 09:09:32.877234 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Dec 03 09:09:32 crc kubenswrapper[4576]: I1203 09:09:32.877366 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Dec 03 09:09:32 crc kubenswrapper[4576]: I1203 09:09:32.886474 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 03 09:09:32 crc kubenswrapper[4576]: I1203 09:09:32.991729 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/83513275-c7cc-450e-9bca-79ca295b7906-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"83513275-c7cc-450e-9bca-79ca295b7906\") " pod="openstack/openstack-cell1-galera-0" Dec 03 09:09:32 crc kubenswrapper[4576]: I1203 09:09:32.991774 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/83513275-c7cc-450e-9bca-79ca295b7906-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"83513275-c7cc-450e-9bca-79ca295b7906\") " pod="openstack/openstack-cell1-galera-0" Dec 03 09:09:32 crc kubenswrapper[4576]: I1203 09:09:32.991805 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83513275-c7cc-450e-9bca-79ca295b7906-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"83513275-c7cc-450e-9bca-79ca295b7906\") " pod="openstack/openstack-cell1-galera-0" Dec 03 09:09:32 crc kubenswrapper[4576]: I1203 09:09:32.991827 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/83513275-c7cc-450e-9bca-79ca295b7906-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"83513275-c7cc-450e-9bca-79ca295b7906\") " pod="openstack/openstack-cell1-galera-0" Dec 03 09:09:32 crc kubenswrapper[4576]: I1203 09:09:32.991843 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s65vx\" (UniqueName: \"kubernetes.io/projected/83513275-c7cc-450e-9bca-79ca295b7906-kube-api-access-s65vx\") pod \"openstack-cell1-galera-0\" (UID: \"83513275-c7cc-450e-9bca-79ca295b7906\") " pod="openstack/openstack-cell1-galera-0" Dec 03 09:09:32 crc kubenswrapper[4576]: I1203 09:09:32.991873 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/83513275-c7cc-450e-9bca-79ca295b7906-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"83513275-c7cc-450e-9bca-79ca295b7906\") " pod="openstack/openstack-cell1-galera-0" Dec 03 09:09:32 crc kubenswrapper[4576]: I1203 09:09:32.991897 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"83513275-c7cc-450e-9bca-79ca295b7906\") " pod="openstack/openstack-cell1-galera-0" Dec 03 09:09:32 crc kubenswrapper[4576]: I1203 09:09:32.991922 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/83513275-c7cc-450e-9bca-79ca295b7906-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"83513275-c7cc-450e-9bca-79ca295b7906\") " pod="openstack/openstack-cell1-galera-0" Dec 03 09:09:33 crc kubenswrapper[4576]: I1203 09:09:33.092873 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83513275-c7cc-450e-9bca-79ca295b7906-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"83513275-c7cc-450e-9bca-79ca295b7906\") " pod="openstack/openstack-cell1-galera-0" Dec 03 09:09:33 crc kubenswrapper[4576]: I1203 09:09:33.092918 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/83513275-c7cc-450e-9bca-79ca295b7906-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"83513275-c7cc-450e-9bca-79ca295b7906\") " pod="openstack/openstack-cell1-galera-0" Dec 03 09:09:33 crc kubenswrapper[4576]: I1203 09:09:33.092941 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s65vx\" (UniqueName: \"kubernetes.io/projected/83513275-c7cc-450e-9bca-79ca295b7906-kube-api-access-s65vx\") pod \"openstack-cell1-galera-0\" (UID: \"83513275-c7cc-450e-9bca-79ca295b7906\") " pod="openstack/openstack-cell1-galera-0" Dec 03 09:09:33 crc kubenswrapper[4576]: I1203 09:09:33.092967 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/83513275-c7cc-450e-9bca-79ca295b7906-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"83513275-c7cc-450e-9bca-79ca295b7906\") " pod="openstack/openstack-cell1-galera-0" Dec 03 09:09:33 crc kubenswrapper[4576]: I1203 09:09:33.092990 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"83513275-c7cc-450e-9bca-79ca295b7906\") " pod="openstack/openstack-cell1-galera-0" Dec 03 09:09:33 crc kubenswrapper[4576]: I1203 09:09:33.093017 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/83513275-c7cc-450e-9bca-79ca295b7906-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"83513275-c7cc-450e-9bca-79ca295b7906\") " pod="openstack/openstack-cell1-galera-0" Dec 03 09:09:33 crc kubenswrapper[4576]: I1203 09:09:33.093088 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/83513275-c7cc-450e-9bca-79ca295b7906-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"83513275-c7cc-450e-9bca-79ca295b7906\") " pod="openstack/openstack-cell1-galera-0" Dec 03 09:09:33 crc kubenswrapper[4576]: I1203 09:09:33.093111 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/83513275-c7cc-450e-9bca-79ca295b7906-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"83513275-c7cc-450e-9bca-79ca295b7906\") " pod="openstack/openstack-cell1-galera-0" Dec 03 09:09:33 crc kubenswrapper[4576]: I1203 09:09:33.093686 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/83513275-c7cc-450e-9bca-79ca295b7906-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"83513275-c7cc-450e-9bca-79ca295b7906\") " pod="openstack/openstack-cell1-galera-0" Dec 03 09:09:33 crc kubenswrapper[4576]: I1203 09:09:33.094135 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/83513275-c7cc-450e-9bca-79ca295b7906-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"83513275-c7cc-450e-9bca-79ca295b7906\") " pod="openstack/openstack-cell1-galera-0" Dec 03 09:09:33 crc kubenswrapper[4576]: I1203 09:09:33.094304 4576 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"83513275-c7cc-450e-9bca-79ca295b7906\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/openstack-cell1-galera-0" Dec 03 09:09:33 crc kubenswrapper[4576]: I1203 09:09:33.094655 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/83513275-c7cc-450e-9bca-79ca295b7906-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"83513275-c7cc-450e-9bca-79ca295b7906\") " pod="openstack/openstack-cell1-galera-0" Dec 03 09:09:33 crc kubenswrapper[4576]: I1203 09:09:33.102088 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83513275-c7cc-450e-9bca-79ca295b7906-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"83513275-c7cc-450e-9bca-79ca295b7906\") " pod="openstack/openstack-cell1-galera-0" Dec 03 09:09:33 crc kubenswrapper[4576]: I1203 09:09:33.104373 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/83513275-c7cc-450e-9bca-79ca295b7906-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"83513275-c7cc-450e-9bca-79ca295b7906\") " pod="openstack/openstack-cell1-galera-0" Dec 03 09:09:33 crc kubenswrapper[4576]: I1203 09:09:33.123384 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/83513275-c7cc-450e-9bca-79ca295b7906-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"83513275-c7cc-450e-9bca-79ca295b7906\") " pod="openstack/openstack-cell1-galera-0" Dec 03 09:09:33 crc kubenswrapper[4576]: I1203 09:09:33.129718 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s65vx\" (UniqueName: \"kubernetes.io/projected/83513275-c7cc-450e-9bca-79ca295b7906-kube-api-access-s65vx\") pod \"openstack-cell1-galera-0\" (UID: 
\"83513275-c7cc-450e-9bca-79ca295b7906\") " pod="openstack/openstack-cell1-galera-0" Dec 03 09:09:33 crc kubenswrapper[4576]: I1203 09:09:33.168161 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"83513275-c7cc-450e-9bca-79ca295b7906\") " pod="openstack/openstack-cell1-galera-0" Dec 03 09:09:33 crc kubenswrapper[4576]: I1203 09:09:33.194699 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 03 09:09:33 crc kubenswrapper[4576]: I1203 09:09:33.262215 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Dec 03 09:09:33 crc kubenswrapper[4576]: I1203 09:09:33.263408 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 03 09:09:33 crc kubenswrapper[4576]: I1203 09:09:33.270948 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-4ldn9" Dec 03 09:09:33 crc kubenswrapper[4576]: I1203 09:09:33.271018 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Dec 03 09:09:33 crc kubenswrapper[4576]: I1203 09:09:33.271188 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Dec 03 09:09:33 crc kubenswrapper[4576]: I1203 09:09:33.306100 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 03 09:09:33 crc kubenswrapper[4576]: I1203 09:09:33.398326 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7c148cb-508f-45ed-a5ea-06b0b4bc51ff-memcached-tls-certs\") pod \"memcached-0\" (UID: \"c7c148cb-508f-45ed-a5ea-06b0b4bc51ff\") " pod="openstack/memcached-0" Dec 03 09:09:33 crc kubenswrapper[4576]: I1203 09:09:33.398424 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7c148cb-508f-45ed-a5ea-06b0b4bc51ff-combined-ca-bundle\") pod \"memcached-0\" (UID: \"c7c148cb-508f-45ed-a5ea-06b0b4bc51ff\") " pod="openstack/memcached-0" Dec 03 09:09:33 crc kubenswrapper[4576]: I1203 09:09:33.398453 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c7c148cb-508f-45ed-a5ea-06b0b4bc51ff-config-data\") pod \"memcached-0\" (UID: \"c7c148cb-508f-45ed-a5ea-06b0b4bc51ff\") " pod="openstack/memcached-0" Dec 03 09:09:33 crc kubenswrapper[4576]: I1203 09:09:33.398513 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c7c148cb-508f-45ed-a5ea-06b0b4bc51ff-kolla-config\") pod \"memcached-0\" (UID: \"c7c148cb-508f-45ed-a5ea-06b0b4bc51ff\") " pod="openstack/memcached-0" Dec 03 09:09:33 crc kubenswrapper[4576]: I1203 09:09:33.398556 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ljw4w\" (UniqueName: \"kubernetes.io/projected/c7c148cb-508f-45ed-a5ea-06b0b4bc51ff-kube-api-access-ljw4w\") pod \"memcached-0\" (UID: \"c7c148cb-508f-45ed-a5ea-06b0b4bc51ff\") " pod="openstack/memcached-0" Dec 03 09:09:33 crc kubenswrapper[4576]: I1203 09:09:33.500112 4576 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c7c148cb-508f-45ed-a5ea-06b0b4bc51ff-kolla-config\") pod \"memcached-0\" (UID: \"c7c148cb-508f-45ed-a5ea-06b0b4bc51ff\") " pod="openstack/memcached-0" Dec 03 09:09:33 crc kubenswrapper[4576]: I1203 09:09:33.500167 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ljw4w\" (UniqueName: \"kubernetes.io/projected/c7c148cb-508f-45ed-a5ea-06b0b4bc51ff-kube-api-access-ljw4w\") pod \"memcached-0\" (UID: \"c7c148cb-508f-45ed-a5ea-06b0b4bc51ff\") " pod="openstack/memcached-0" Dec 03 09:09:33 crc kubenswrapper[4576]: I1203 09:09:33.500209 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7c148cb-508f-45ed-a5ea-06b0b4bc51ff-memcached-tls-certs\") pod \"memcached-0\" (UID: \"c7c148cb-508f-45ed-a5ea-06b0b4bc51ff\") " pod="openstack/memcached-0" Dec 03 09:09:33 crc kubenswrapper[4576]: I1203 09:09:33.500295 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7c148cb-508f-45ed-a5ea-06b0b4bc51ff-combined-ca-bundle\") pod \"memcached-0\" (UID: \"c7c148cb-508f-45ed-a5ea-06b0b4bc51ff\") " pod="openstack/memcached-0" Dec 03 09:09:33 crc kubenswrapper[4576]: I1203 09:09:33.500315 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c7c148cb-508f-45ed-a5ea-06b0b4bc51ff-config-data\") pod \"memcached-0\" (UID: \"c7c148cb-508f-45ed-a5ea-06b0b4bc51ff\") " pod="openstack/memcached-0" Dec 03 09:09:33 crc kubenswrapper[4576]: I1203 09:09:33.501288 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c7c148cb-508f-45ed-a5ea-06b0b4bc51ff-config-data\") pod \"memcached-0\" (UID: \"c7c148cb-508f-45ed-a5ea-06b0b4bc51ff\") " pod="openstack/memcached-0" Dec 03 09:09:33 crc kubenswrapper[4576]: I1203 09:09:33.510904 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c7c148cb-508f-45ed-a5ea-06b0b4bc51ff-kolla-config\") pod \"memcached-0\" (UID: \"c7c148cb-508f-45ed-a5ea-06b0b4bc51ff\") " pod="openstack/memcached-0" Dec 03 09:09:33 crc kubenswrapper[4576]: I1203 09:09:33.511826 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7c148cb-508f-45ed-a5ea-06b0b4bc51ff-memcached-tls-certs\") pod \"memcached-0\" (UID: \"c7c148cb-508f-45ed-a5ea-06b0b4bc51ff\") " pod="openstack/memcached-0" Dec 03 09:09:33 crc kubenswrapper[4576]: I1203 09:09:33.512046 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7c148cb-508f-45ed-a5ea-06b0b4bc51ff-combined-ca-bundle\") pod \"memcached-0\" (UID: \"c7c148cb-508f-45ed-a5ea-06b0b4bc51ff\") " pod="openstack/memcached-0" Dec 03 09:09:33 crc kubenswrapper[4576]: I1203 09:09:33.522400 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ljw4w\" (UniqueName: \"kubernetes.io/projected/c7c148cb-508f-45ed-a5ea-06b0b4bc51ff-kube-api-access-ljw4w\") pod \"memcached-0\" (UID: \"c7c148cb-508f-45ed-a5ea-06b0b4bc51ff\") " pod="openstack/memcached-0" Dec 03 09:09:33 crc kubenswrapper[4576]: I1203 09:09:33.591336 4576 util.go:30] 
"No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 03 09:09:35 crc kubenswrapper[4576]: I1203 09:09:35.020663 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 09:09:35 crc kubenswrapper[4576]: I1203 09:09:35.021891 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 03 09:09:35 crc kubenswrapper[4576]: I1203 09:09:35.028230 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-f7nxm" Dec 03 09:09:35 crc kubenswrapper[4576]: I1203 09:09:35.044232 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 09:09:35 crc kubenswrapper[4576]: I1203 09:09:35.121493 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v952r\" (UniqueName: \"kubernetes.io/projected/4dce317b-36ba-47d9-9175-a50ed2bf038b-kube-api-access-v952r\") pod \"kube-state-metrics-0\" (UID: \"4dce317b-36ba-47d9-9175-a50ed2bf038b\") " pod="openstack/kube-state-metrics-0" Dec 03 09:09:35 crc kubenswrapper[4576]: I1203 09:09:35.222827 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v952r\" (UniqueName: \"kubernetes.io/projected/4dce317b-36ba-47d9-9175-a50ed2bf038b-kube-api-access-v952r\") pod \"kube-state-metrics-0\" (UID: \"4dce317b-36ba-47d9-9175-a50ed2bf038b\") " pod="openstack/kube-state-metrics-0" Dec 03 09:09:35 crc kubenswrapper[4576]: I1203 09:09:35.261577 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v952r\" (UniqueName: \"kubernetes.io/projected/4dce317b-36ba-47d9-9175-a50ed2bf038b-kube-api-access-v952r\") pod \"kube-state-metrics-0\" (UID: \"4dce317b-36ba-47d9-9175-a50ed2bf038b\") " pod="openstack/kube-state-metrics-0" Dec 03 09:09:35 crc kubenswrapper[4576]: I1203 09:09:35.508963 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.453012 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-25phb"] Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.455264 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-25phb" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.457756 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.458513 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-cwr75" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.467081 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-25phb"] Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.472955 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.498241 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-pjp95"] Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.502054 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-pjp95" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.521570 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-pjp95"] Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.547940 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1f709485-8dc6-4e99-ba88-880d491fca2e-var-run-ovn\") pod \"ovn-controller-25phb\" (UID: \"1f709485-8dc6-4e99-ba88-880d491fca2e\") " pod="openstack/ovn-controller-25phb" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.547985 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1f709485-8dc6-4e99-ba88-880d491fca2e-var-run\") pod \"ovn-controller-25phb\" (UID: \"1f709485-8dc6-4e99-ba88-880d491fca2e\") " pod="openstack/ovn-controller-25phb" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.548016 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mn9b6\" (UniqueName: \"kubernetes.io/projected/1f709485-8dc6-4e99-ba88-880d491fca2e-kube-api-access-mn9b6\") pod \"ovn-controller-25phb\" (UID: \"1f709485-8dc6-4e99-ba88-880d491fca2e\") " pod="openstack/ovn-controller-25phb" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.548057 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f709485-8dc6-4e99-ba88-880d491fca2e-ovn-controller-tls-certs\") pod \"ovn-controller-25phb\" (UID: \"1f709485-8dc6-4e99-ba88-880d491fca2e\") " pod="openstack/ovn-controller-25phb" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.548084 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f709485-8dc6-4e99-ba88-880d491fca2e-combined-ca-bundle\") pod \"ovn-controller-25phb\" (UID: \"1f709485-8dc6-4e99-ba88-880d491fca2e\") " pod="openstack/ovn-controller-25phb" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.548140 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1f709485-8dc6-4e99-ba88-880d491fca2e-scripts\") pod \"ovn-controller-25phb\" (UID: \"1f709485-8dc6-4e99-ba88-880d491fca2e\") " pod="openstack/ovn-controller-25phb" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.548164 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1f709485-8dc6-4e99-ba88-880d491fca2e-var-log-ovn\") pod \"ovn-controller-25phb\" (UID: \"1f709485-8dc6-4e99-ba88-880d491fca2e\") " pod="openstack/ovn-controller-25phb" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.650319 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/310fba87-b39f-4613-a373-54ecd21ed629-scripts\") pod \"ovn-controller-ovs-pjp95\" (UID: \"310fba87-b39f-4613-a373-54ecd21ed629\") " pod="openstack/ovn-controller-ovs-pjp95" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.650413 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/1f709485-8dc6-4e99-ba88-880d491fca2e-combined-ca-bundle\") pod \"ovn-controller-25phb\" (UID: \"1f709485-8dc6-4e99-ba88-880d491fca2e\") " pod="openstack/ovn-controller-25phb" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.650520 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/310fba87-b39f-4613-a373-54ecd21ed629-var-run\") pod \"ovn-controller-ovs-pjp95\" (UID: \"310fba87-b39f-4613-a373-54ecd21ed629\") " pod="openstack/ovn-controller-ovs-pjp95" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.650702 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1f709485-8dc6-4e99-ba88-880d491fca2e-scripts\") pod \"ovn-controller-25phb\" (UID: \"1f709485-8dc6-4e99-ba88-880d491fca2e\") " pod="openstack/ovn-controller-25phb" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.650781 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/310fba87-b39f-4613-a373-54ecd21ed629-var-lib\") pod \"ovn-controller-ovs-pjp95\" (UID: \"310fba87-b39f-4613-a373-54ecd21ed629\") " pod="openstack/ovn-controller-ovs-pjp95" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.650820 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1f709485-8dc6-4e99-ba88-880d491fca2e-var-log-ovn\") pod \"ovn-controller-25phb\" (UID: \"1f709485-8dc6-4e99-ba88-880d491fca2e\") " pod="openstack/ovn-controller-25phb" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.650930 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1f709485-8dc6-4e99-ba88-880d491fca2e-var-run-ovn\") pod \"ovn-controller-25phb\" (UID: \"1f709485-8dc6-4e99-ba88-880d491fca2e\") " pod="openstack/ovn-controller-25phb" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.650966 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1f709485-8dc6-4e99-ba88-880d491fca2e-var-run\") pod \"ovn-controller-25phb\" (UID: \"1f709485-8dc6-4e99-ba88-880d491fca2e\") " pod="openstack/ovn-controller-25phb" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.651339 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bskcv\" (UniqueName: \"kubernetes.io/projected/310fba87-b39f-4613-a373-54ecd21ed629-kube-api-access-bskcv\") pod \"ovn-controller-ovs-pjp95\" (UID: \"310fba87-b39f-4613-a373-54ecd21ed629\") " pod="openstack/ovn-controller-ovs-pjp95" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.651487 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mn9b6\" (UniqueName: \"kubernetes.io/projected/1f709485-8dc6-4e99-ba88-880d491fca2e-kube-api-access-mn9b6\") pod \"ovn-controller-25phb\" (UID: \"1f709485-8dc6-4e99-ba88-880d491fca2e\") " pod="openstack/ovn-controller-25phb" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.651639 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/310fba87-b39f-4613-a373-54ecd21ed629-etc-ovs\") pod \"ovn-controller-ovs-pjp95\" (UID: 
\"310fba87-b39f-4613-a373-54ecd21ed629\") " pod="openstack/ovn-controller-ovs-pjp95" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.651744 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f709485-8dc6-4e99-ba88-880d491fca2e-ovn-controller-tls-certs\") pod \"ovn-controller-25phb\" (UID: \"1f709485-8dc6-4e99-ba88-880d491fca2e\") " pod="openstack/ovn-controller-25phb" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.651851 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/310fba87-b39f-4613-a373-54ecd21ed629-var-log\") pod \"ovn-controller-ovs-pjp95\" (UID: \"310fba87-b39f-4613-a373-54ecd21ed629\") " pod="openstack/ovn-controller-ovs-pjp95" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.652013 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1f709485-8dc6-4e99-ba88-880d491fca2e-var-run\") pod \"ovn-controller-25phb\" (UID: \"1f709485-8dc6-4e99-ba88-880d491fca2e\") " pod="openstack/ovn-controller-25phb" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.652096 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1f709485-8dc6-4e99-ba88-880d491fca2e-var-run-ovn\") pod \"ovn-controller-25phb\" (UID: \"1f709485-8dc6-4e99-ba88-880d491fca2e\") " pod="openstack/ovn-controller-25phb" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.652183 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1f709485-8dc6-4e99-ba88-880d491fca2e-var-log-ovn\") pod \"ovn-controller-25phb\" (UID: \"1f709485-8dc6-4e99-ba88-880d491fca2e\") " pod="openstack/ovn-controller-25phb" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.654942 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1f709485-8dc6-4e99-ba88-880d491fca2e-scripts\") pod \"ovn-controller-25phb\" (UID: \"1f709485-8dc6-4e99-ba88-880d491fca2e\") " pod="openstack/ovn-controller-25phb" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.675142 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f709485-8dc6-4e99-ba88-880d491fca2e-ovn-controller-tls-certs\") pod \"ovn-controller-25phb\" (UID: \"1f709485-8dc6-4e99-ba88-880d491fca2e\") " pod="openstack/ovn-controller-25phb" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.675297 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f709485-8dc6-4e99-ba88-880d491fca2e-combined-ca-bundle\") pod \"ovn-controller-25phb\" (UID: \"1f709485-8dc6-4e99-ba88-880d491fca2e\") " pod="openstack/ovn-controller-25phb" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.679185 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mn9b6\" (UniqueName: \"kubernetes.io/projected/1f709485-8dc6-4e99-ba88-880d491fca2e-kube-api-access-mn9b6\") pod \"ovn-controller-25phb\" (UID: \"1f709485-8dc6-4e99-ba88-880d491fca2e\") " pod="openstack/ovn-controller-25phb" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.753886 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/310fba87-b39f-4613-a373-54ecd21ed629-etc-ovs\") pod \"ovn-controller-ovs-pjp95\" (UID: \"310fba87-b39f-4613-a373-54ecd21ed629\") " pod="openstack/ovn-controller-ovs-pjp95" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.753962 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/310fba87-b39f-4613-a373-54ecd21ed629-var-log\") pod \"ovn-controller-ovs-pjp95\" (UID: \"310fba87-b39f-4613-a373-54ecd21ed629\") " pod="openstack/ovn-controller-ovs-pjp95" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.753997 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/310fba87-b39f-4613-a373-54ecd21ed629-scripts\") pod \"ovn-controller-ovs-pjp95\" (UID: \"310fba87-b39f-4613-a373-54ecd21ed629\") " pod="openstack/ovn-controller-ovs-pjp95" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.754030 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/310fba87-b39f-4613-a373-54ecd21ed629-var-run\") pod \"ovn-controller-ovs-pjp95\" (UID: \"310fba87-b39f-4613-a373-54ecd21ed629\") " pod="openstack/ovn-controller-ovs-pjp95" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.754086 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/310fba87-b39f-4613-a373-54ecd21ed629-var-lib\") pod \"ovn-controller-ovs-pjp95\" (UID: \"310fba87-b39f-4613-a373-54ecd21ed629\") " pod="openstack/ovn-controller-ovs-pjp95" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.754164 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bskcv\" (UniqueName: \"kubernetes.io/projected/310fba87-b39f-4613-a373-54ecd21ed629-kube-api-access-bskcv\") pod \"ovn-controller-ovs-pjp95\" (UID: \"310fba87-b39f-4613-a373-54ecd21ed629\") " pod="openstack/ovn-controller-ovs-pjp95" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.754278 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/310fba87-b39f-4613-a373-54ecd21ed629-var-log\") pod \"ovn-controller-ovs-pjp95\" (UID: \"310fba87-b39f-4613-a373-54ecd21ed629\") " pod="openstack/ovn-controller-ovs-pjp95" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.754356 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/310fba87-b39f-4613-a373-54ecd21ed629-var-run\") pod \"ovn-controller-ovs-pjp95\" (UID: \"310fba87-b39f-4613-a373-54ecd21ed629\") " pod="openstack/ovn-controller-ovs-pjp95" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.754351 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/310fba87-b39f-4613-a373-54ecd21ed629-etc-ovs\") pod \"ovn-controller-ovs-pjp95\" (UID: \"310fba87-b39f-4613-a373-54ecd21ed629\") " pod="openstack/ovn-controller-ovs-pjp95" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.754460 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/310fba87-b39f-4613-a373-54ecd21ed629-var-lib\") pod \"ovn-controller-ovs-pjp95\" (UID: \"310fba87-b39f-4613-a373-54ecd21ed629\") " pod="openstack/ovn-controller-ovs-pjp95" Dec 03 09:09:38 crc 
kubenswrapper[4576]: I1203 09:09:38.755921 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/310fba87-b39f-4613-a373-54ecd21ed629-scripts\") pod \"ovn-controller-ovs-pjp95\" (UID: \"310fba87-b39f-4613-a373-54ecd21ed629\") " pod="openstack/ovn-controller-ovs-pjp95" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.772416 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-25phb" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.778564 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bskcv\" (UniqueName: \"kubernetes.io/projected/310fba87-b39f-4613-a373-54ecd21ed629-kube-api-access-bskcv\") pod \"ovn-controller-ovs-pjp95\" (UID: \"310fba87-b39f-4613-a373-54ecd21ed629\") " pod="openstack/ovn-controller-ovs-pjp95" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.819751 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-pjp95" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.935498 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.937476 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.940747 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.941084 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-swj4g" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.941274 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.941486 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.942463 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Dec 03 09:09:38 crc kubenswrapper[4576]: I1203 09:09:38.959407 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 03 09:09:39 crc kubenswrapper[4576]: I1203 09:09:39.060309 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"64d45188-c499-4626-bdd3-6f54a0ed3f14\") " pod="openstack/ovsdbserver-nb-0" Dec 03 09:09:39 crc kubenswrapper[4576]: I1203 09:09:39.060428 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64d45188-c499-4626-bdd3-6f54a0ed3f14-config\") pod \"ovsdbserver-nb-0\" (UID: \"64d45188-c499-4626-bdd3-6f54a0ed3f14\") " pod="openstack/ovsdbserver-nb-0" Dec 03 09:09:39 crc kubenswrapper[4576]: I1203 09:09:39.060455 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/64d45188-c499-4626-bdd3-6f54a0ed3f14-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"64d45188-c499-4626-bdd3-6f54a0ed3f14\") " pod="openstack/ovsdbserver-nb-0" Dec 03 09:09:39 crc kubenswrapper[4576]: 
I1203 09:09:39.060507 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vxc6r\" (UniqueName: \"kubernetes.io/projected/64d45188-c499-4626-bdd3-6f54a0ed3f14-kube-api-access-vxc6r\") pod \"ovsdbserver-nb-0\" (UID: \"64d45188-c499-4626-bdd3-6f54a0ed3f14\") " pod="openstack/ovsdbserver-nb-0" Dec 03 09:09:39 crc kubenswrapper[4576]: I1203 09:09:39.060550 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/64d45188-c499-4626-bdd3-6f54a0ed3f14-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"64d45188-c499-4626-bdd3-6f54a0ed3f14\") " pod="openstack/ovsdbserver-nb-0" Dec 03 09:09:39 crc kubenswrapper[4576]: I1203 09:09:39.060727 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/64d45188-c499-4626-bdd3-6f54a0ed3f14-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"64d45188-c499-4626-bdd3-6f54a0ed3f14\") " pod="openstack/ovsdbserver-nb-0" Dec 03 09:09:39 crc kubenswrapper[4576]: I1203 09:09:39.060788 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64d45188-c499-4626-bdd3-6f54a0ed3f14-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"64d45188-c499-4626-bdd3-6f54a0ed3f14\") " pod="openstack/ovsdbserver-nb-0" Dec 03 09:09:39 crc kubenswrapper[4576]: I1203 09:09:39.060863 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/64d45188-c499-4626-bdd3-6f54a0ed3f14-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"64d45188-c499-4626-bdd3-6f54a0ed3f14\") " pod="openstack/ovsdbserver-nb-0" Dec 03 09:09:39 crc kubenswrapper[4576]: I1203 09:09:39.162918 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/64d45188-c499-4626-bdd3-6f54a0ed3f14-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"64d45188-c499-4626-bdd3-6f54a0ed3f14\") " pod="openstack/ovsdbserver-nb-0" Dec 03 09:09:39 crc kubenswrapper[4576]: I1203 09:09:39.163276 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"64d45188-c499-4626-bdd3-6f54a0ed3f14\") " pod="openstack/ovsdbserver-nb-0" Dec 03 09:09:39 crc kubenswrapper[4576]: I1203 09:09:39.163449 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64d45188-c499-4626-bdd3-6f54a0ed3f14-config\") pod \"ovsdbserver-nb-0\" (UID: \"64d45188-c499-4626-bdd3-6f54a0ed3f14\") " pod="openstack/ovsdbserver-nb-0" Dec 03 09:09:39 crc kubenswrapper[4576]: I1203 09:09:39.163593 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/64d45188-c499-4626-bdd3-6f54a0ed3f14-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"64d45188-c499-4626-bdd3-6f54a0ed3f14\") " pod="openstack/ovsdbserver-nb-0" Dec 03 09:09:39 crc kubenswrapper[4576]: I1203 09:09:39.163740 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vxc6r\" (UniqueName: 
\"kubernetes.io/projected/64d45188-c499-4626-bdd3-6f54a0ed3f14-kube-api-access-vxc6r\") pod \"ovsdbserver-nb-0\" (UID: \"64d45188-c499-4626-bdd3-6f54a0ed3f14\") " pod="openstack/ovsdbserver-nb-0" Dec 03 09:09:39 crc kubenswrapper[4576]: I1203 09:09:39.163868 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/64d45188-c499-4626-bdd3-6f54a0ed3f14-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"64d45188-c499-4626-bdd3-6f54a0ed3f14\") " pod="openstack/ovsdbserver-nb-0" Dec 03 09:09:39 crc kubenswrapper[4576]: I1203 09:09:39.163989 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/64d45188-c499-4626-bdd3-6f54a0ed3f14-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"64d45188-c499-4626-bdd3-6f54a0ed3f14\") " pod="openstack/ovsdbserver-nb-0" Dec 03 09:09:39 crc kubenswrapper[4576]: I1203 09:09:39.164145 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64d45188-c499-4626-bdd3-6f54a0ed3f14-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"64d45188-c499-4626-bdd3-6f54a0ed3f14\") " pod="openstack/ovsdbserver-nb-0" Dec 03 09:09:39 crc kubenswrapper[4576]: I1203 09:09:39.163885 4576 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"64d45188-c499-4626-bdd3-6f54a0ed3f14\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/ovsdbserver-nb-0" Dec 03 09:09:39 crc kubenswrapper[4576]: I1203 09:09:39.164789 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/64d45188-c499-4626-bdd3-6f54a0ed3f14-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"64d45188-c499-4626-bdd3-6f54a0ed3f14\") " pod="openstack/ovsdbserver-nb-0" Dec 03 09:09:39 crc kubenswrapper[4576]: I1203 09:09:39.164947 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/64d45188-c499-4626-bdd3-6f54a0ed3f14-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"64d45188-c499-4626-bdd3-6f54a0ed3f14\") " pod="openstack/ovsdbserver-nb-0" Dec 03 09:09:39 crc kubenswrapper[4576]: I1203 09:09:39.165686 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64d45188-c499-4626-bdd3-6f54a0ed3f14-config\") pod \"ovsdbserver-nb-0\" (UID: \"64d45188-c499-4626-bdd3-6f54a0ed3f14\") " pod="openstack/ovsdbserver-nb-0" Dec 03 09:09:39 crc kubenswrapper[4576]: I1203 09:09:39.168375 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/64d45188-c499-4626-bdd3-6f54a0ed3f14-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"64d45188-c499-4626-bdd3-6f54a0ed3f14\") " pod="openstack/ovsdbserver-nb-0" Dec 03 09:09:39 crc kubenswrapper[4576]: I1203 09:09:39.172978 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/64d45188-c499-4626-bdd3-6f54a0ed3f14-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"64d45188-c499-4626-bdd3-6f54a0ed3f14\") " pod="openstack/ovsdbserver-nb-0" Dec 03 09:09:39 crc kubenswrapper[4576]: I1203 09:09:39.177978 4576 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64d45188-c499-4626-bdd3-6f54a0ed3f14-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"64d45188-c499-4626-bdd3-6f54a0ed3f14\") " pod="openstack/ovsdbserver-nb-0" Dec 03 09:09:39 crc kubenswrapper[4576]: I1203 09:09:39.180899 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vxc6r\" (UniqueName: \"kubernetes.io/projected/64d45188-c499-4626-bdd3-6f54a0ed3f14-kube-api-access-vxc6r\") pod \"ovsdbserver-nb-0\" (UID: \"64d45188-c499-4626-bdd3-6f54a0ed3f14\") " pod="openstack/ovsdbserver-nb-0" Dec 03 09:09:39 crc kubenswrapper[4576]: I1203 09:09:39.200712 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"64d45188-c499-4626-bdd3-6f54a0ed3f14\") " pod="openstack/ovsdbserver-nb-0" Dec 03 09:09:39 crc kubenswrapper[4576]: I1203 09:09:39.269856 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 03 09:09:39 crc kubenswrapper[4576]: I1203 09:09:39.677418 4576 scope.go:117] "RemoveContainer" containerID="971057ff9d8dd948ea2baec3fb0fe13004ef1bdcb132acd7f101f25e8ebc2c91" Dec 03 09:09:39 crc kubenswrapper[4576]: E1203 09:09:39.677678 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:09:42 crc kubenswrapper[4576]: W1203 09:09:42.085018 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6d6451b7_d2df_487c_afa3_3f1e56758ce4.slice/crio-966bfff0933c521ea1d2bb5cc20a33f34753c788f9f0cbd3dec797aefe8b27c6 WatchSource:0}: Error finding container 966bfff0933c521ea1d2bb5cc20a33f34753c788f9f0cbd3dec797aefe8b27c6: Status 404 returned error can't find the container with id 966bfff0933c521ea1d2bb5cc20a33f34753c788f9f0cbd3dec797aefe8b27c6 Dec 03 09:09:42 crc kubenswrapper[4576]: I1203 09:09:42.586959 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"6d6451b7-d2df-487c-afa3-3f1e56758ce4","Type":"ContainerStarted","Data":"966bfff0933c521ea1d2bb5cc20a33f34753c788f9f0cbd3dec797aefe8b27c6"} Dec 03 09:09:42 crc kubenswrapper[4576]: I1203 09:09:42.677753 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 03 09:09:42 crc kubenswrapper[4576]: I1203 09:09:42.688854 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 03 09:09:42 crc kubenswrapper[4576]: I1203 09:09:42.692885 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Dec 03 09:09:42 crc kubenswrapper[4576]: I1203 09:09:42.694892 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-thk2f" Dec 03 09:09:42 crc kubenswrapper[4576]: I1203 09:09:42.696703 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Dec 03 09:09:42 crc kubenswrapper[4576]: I1203 09:09:42.697045 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Dec 03 09:09:42 crc kubenswrapper[4576]: I1203 09:09:42.700140 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 03 09:09:42 crc kubenswrapper[4576]: I1203 09:09:42.820485 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a2bee80-63d4-41ae-97fc-54a96c4afc6e-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"3a2bee80-63d4-41ae-97fc-54a96c4afc6e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 09:09:42 crc kubenswrapper[4576]: I1203 09:09:42.820586 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l4k5n\" (UniqueName: \"kubernetes.io/projected/3a2bee80-63d4-41ae-97fc-54a96c4afc6e-kube-api-access-l4k5n\") pod \"ovsdbserver-sb-0\" (UID: \"3a2bee80-63d4-41ae-97fc-54a96c4afc6e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 09:09:42 crc kubenswrapper[4576]: I1203 09:09:42.820610 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a2bee80-63d4-41ae-97fc-54a96c4afc6e-config\") pod \"ovsdbserver-sb-0\" (UID: \"3a2bee80-63d4-41ae-97fc-54a96c4afc6e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 09:09:42 crc kubenswrapper[4576]: I1203 09:09:42.820631 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3a2bee80-63d4-41ae-97fc-54a96c4afc6e-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"3a2bee80-63d4-41ae-97fc-54a96c4afc6e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 09:09:42 crc kubenswrapper[4576]: I1203 09:09:42.820671 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a2bee80-63d4-41ae-97fc-54a96c4afc6e-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"3a2bee80-63d4-41ae-97fc-54a96c4afc6e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 09:09:42 crc kubenswrapper[4576]: I1203 09:09:42.820693 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3a2bee80-63d4-41ae-97fc-54a96c4afc6e-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"3a2bee80-63d4-41ae-97fc-54a96c4afc6e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 09:09:42 crc kubenswrapper[4576]: I1203 09:09:42.820717 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-sb-0\" (UID: \"3a2bee80-63d4-41ae-97fc-54a96c4afc6e\") " 
pod="openstack/ovsdbserver-sb-0" Dec 03 09:09:42 crc kubenswrapper[4576]: I1203 09:09:42.821328 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a2bee80-63d4-41ae-97fc-54a96c4afc6e-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"3a2bee80-63d4-41ae-97fc-54a96c4afc6e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 09:09:42 crc kubenswrapper[4576]: I1203 09:09:42.923059 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a2bee80-63d4-41ae-97fc-54a96c4afc6e-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"3a2bee80-63d4-41ae-97fc-54a96c4afc6e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 09:09:42 crc kubenswrapper[4576]: I1203 09:09:42.923157 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a2bee80-63d4-41ae-97fc-54a96c4afc6e-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"3a2bee80-63d4-41ae-97fc-54a96c4afc6e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 09:09:42 crc kubenswrapper[4576]: I1203 09:09:42.923197 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l4k5n\" (UniqueName: \"kubernetes.io/projected/3a2bee80-63d4-41ae-97fc-54a96c4afc6e-kube-api-access-l4k5n\") pod \"ovsdbserver-sb-0\" (UID: \"3a2bee80-63d4-41ae-97fc-54a96c4afc6e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 09:09:42 crc kubenswrapper[4576]: I1203 09:09:42.923248 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a2bee80-63d4-41ae-97fc-54a96c4afc6e-config\") pod \"ovsdbserver-sb-0\" (UID: \"3a2bee80-63d4-41ae-97fc-54a96c4afc6e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 09:09:42 crc kubenswrapper[4576]: I1203 09:09:42.923274 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3a2bee80-63d4-41ae-97fc-54a96c4afc6e-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"3a2bee80-63d4-41ae-97fc-54a96c4afc6e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 09:09:42 crc kubenswrapper[4576]: I1203 09:09:42.923305 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a2bee80-63d4-41ae-97fc-54a96c4afc6e-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"3a2bee80-63d4-41ae-97fc-54a96c4afc6e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 09:09:42 crc kubenswrapper[4576]: I1203 09:09:42.923337 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3a2bee80-63d4-41ae-97fc-54a96c4afc6e-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"3a2bee80-63d4-41ae-97fc-54a96c4afc6e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 09:09:42 crc kubenswrapper[4576]: I1203 09:09:42.923366 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-sb-0\" (UID: \"3a2bee80-63d4-41ae-97fc-54a96c4afc6e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 09:09:42 crc kubenswrapper[4576]: I1203 09:09:42.923637 4576 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-sb-0\" (UID: \"3a2bee80-63d4-41ae-97fc-54a96c4afc6e\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/ovsdbserver-sb-0" Dec 03 09:09:42 crc kubenswrapper[4576]: I1203 09:09:42.924005 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3a2bee80-63d4-41ae-97fc-54a96c4afc6e-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"3a2bee80-63d4-41ae-97fc-54a96c4afc6e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 09:09:42 crc kubenswrapper[4576]: I1203 09:09:42.924572 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a2bee80-63d4-41ae-97fc-54a96c4afc6e-config\") pod \"ovsdbserver-sb-0\" (UID: \"3a2bee80-63d4-41ae-97fc-54a96c4afc6e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 09:09:42 crc kubenswrapper[4576]: I1203 09:09:42.925341 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3a2bee80-63d4-41ae-97fc-54a96c4afc6e-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"3a2bee80-63d4-41ae-97fc-54a96c4afc6e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 09:09:42 crc kubenswrapper[4576]: I1203 09:09:42.930934 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a2bee80-63d4-41ae-97fc-54a96c4afc6e-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"3a2bee80-63d4-41ae-97fc-54a96c4afc6e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 09:09:42 crc kubenswrapper[4576]: I1203 09:09:42.942299 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a2bee80-63d4-41ae-97fc-54a96c4afc6e-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"3a2bee80-63d4-41ae-97fc-54a96c4afc6e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 09:09:42 crc kubenswrapper[4576]: I1203 09:09:42.946225 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a2bee80-63d4-41ae-97fc-54a96c4afc6e-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"3a2bee80-63d4-41ae-97fc-54a96c4afc6e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 09:09:42 crc kubenswrapper[4576]: I1203 09:09:42.950740 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-sb-0\" (UID: \"3a2bee80-63d4-41ae-97fc-54a96c4afc6e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 09:09:42 crc kubenswrapper[4576]: I1203 09:09:42.951384 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l4k5n\" (UniqueName: \"kubernetes.io/projected/3a2bee80-63d4-41ae-97fc-54a96c4afc6e-kube-api-access-l4k5n\") pod \"ovsdbserver-sb-0\" (UID: \"3a2bee80-63d4-41ae-97fc-54a96c4afc6e\") " pod="openstack/ovsdbserver-sb-0" Dec 03 09:09:43 crc kubenswrapper[4576]: I1203 09:09:43.021270 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 03 09:09:51 crc kubenswrapper[4576]: I1203 09:09:51.490616 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-25phb"] Dec 03 09:09:51 crc kubenswrapper[4576]: I1203 09:09:51.677027 4576 scope.go:117] "RemoveContainer" containerID="971057ff9d8dd948ea2baec3fb0fe13004ef1bdcb132acd7f101f25e8ebc2c91" Dec 03 09:09:51 crc kubenswrapper[4576]: E1203 09:09:51.677390 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:09:51 crc kubenswrapper[4576]: E1203 09:09:51.966383 4576 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 03 09:09:51 crc kubenswrapper[4576]: E1203 09:09:51.966649 4576 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hnp2h,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-xgjxj_openstack(46e82bf5-baf2-4164-9844-c1f3e771402b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" 
logger="UnhandledError" Dec 03 09:09:51 crc kubenswrapper[4576]: E1203 09:09:51.967784 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-xgjxj" podUID="46e82bf5-baf2-4164-9844-c1f3e771402b" Dec 03 09:09:52 crc kubenswrapper[4576]: E1203 09:09:52.957992 4576 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Dec 03 09:09:52 crc kubenswrapper[4576]: E1203 09:09:52.958439 4576 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-thkt6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-server-0_openstack(c20e6200-091a-47c3-afef-d1b4d9538309): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" 
logger="UnhandledError" Dec 03 09:09:52 crc kubenswrapper[4576]: E1203 09:09:52.959665 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-server-0" podUID="c20e6200-091a-47c3-afef-d1b4d9538309" Dec 03 09:09:52 crc kubenswrapper[4576]: W1203 09:09:52.972726 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1f709485_8dc6_4e99_ba88_880d491fca2e.slice/crio-28da0d419216bf011cd0d06c4569c82164a8129a289afb3d6a0837823dd11c9d WatchSource:0}: Error finding container 28da0d419216bf011cd0d06c4569c82164a8129a289afb3d6a0837823dd11c9d: Status 404 returned error can't find the container with id 28da0d419216bf011cd0d06c4569c82164a8129a289afb3d6a0837823dd11c9d Dec 03 09:09:53 crc kubenswrapper[4576]: E1203 09:09:53.015792 4576 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 03 09:09:53 crc kubenswrapper[4576]: E1203 09:09:53.016010 4576 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-q4vbg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-666b6646f7-q744m_openstack(382f950f-abff-4951-a3d1-d1fdbae63e30): ErrImagePull: rpc error: code = Canceled desc = copying 
config: context canceled" logger="UnhandledError" Dec 03 09:09:53 crc kubenswrapper[4576]: E1203 09:09:53.017445 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-666b6646f7-q744m" podUID="382f950f-abff-4951-a3d1-d1fdbae63e30" Dec 03 09:09:53 crc kubenswrapper[4576]: E1203 09:09:53.032256 4576 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 03 09:09:53 crc kubenswrapper[4576]: E1203 09:09:53.032459 4576 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6ttqv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-5vtbx_openstack(08c0ad6c-3b00-4cb0-901c-b4ce0fb496cd): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 09:09:53 crc kubenswrapper[4576]: E1203 09:09:53.033942 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-5vtbx" podUID="08c0ad6c-3b00-4cb0-901c-b4ce0fb496cd" Dec 03 09:09:53 crc kubenswrapper[4576]: E1203 09:09:53.096909 4576 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" 
image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 03 09:09:53 crc kubenswrapper[4576]: E1203 09:09:53.097061 4576 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-95fng,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57d769cc4f-gjhtr_openstack(a2e06f57-3350-4eae-9f9b-d544fa698861): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 09:09:53 crc kubenswrapper[4576]: E1203 09:09:53.103624 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-57d769cc4f-gjhtr" podUID="a2e06f57-3350-4eae-9f9b-d544fa698861" Dec 03 09:09:53 crc kubenswrapper[4576]: I1203 09:09:53.163577 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-xgjxj" Dec 03 09:09:53 crc kubenswrapper[4576]: I1203 09:09:53.211178 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46e82bf5-baf2-4164-9844-c1f3e771402b-config\") pod \"46e82bf5-baf2-4164-9844-c1f3e771402b\" (UID: \"46e82bf5-baf2-4164-9844-c1f3e771402b\") " Dec 03 09:09:53 crc kubenswrapper[4576]: I1203 09:09:53.211223 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/46e82bf5-baf2-4164-9844-c1f3e771402b-dns-svc\") pod \"46e82bf5-baf2-4164-9844-c1f3e771402b\" (UID: \"46e82bf5-baf2-4164-9844-c1f3e771402b\") " Dec 03 09:09:53 crc kubenswrapper[4576]: I1203 09:09:53.211286 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hnp2h\" (UniqueName: \"kubernetes.io/projected/46e82bf5-baf2-4164-9844-c1f3e771402b-kube-api-access-hnp2h\") pod \"46e82bf5-baf2-4164-9844-c1f3e771402b\" (UID: \"46e82bf5-baf2-4164-9844-c1f3e771402b\") " Dec 03 09:09:53 crc kubenswrapper[4576]: I1203 09:09:53.213015 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46e82bf5-baf2-4164-9844-c1f3e771402b-config" (OuterVolumeSpecName: "config") pod "46e82bf5-baf2-4164-9844-c1f3e771402b" (UID: "46e82bf5-baf2-4164-9844-c1f3e771402b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:09:53 crc kubenswrapper[4576]: I1203 09:09:53.215199 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46e82bf5-baf2-4164-9844-c1f3e771402b-kube-api-access-hnp2h" (OuterVolumeSpecName: "kube-api-access-hnp2h") pod "46e82bf5-baf2-4164-9844-c1f3e771402b" (UID: "46e82bf5-baf2-4164-9844-c1f3e771402b"). InnerVolumeSpecName "kube-api-access-hnp2h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:09:53 crc kubenswrapper[4576]: I1203 09:09:53.215748 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46e82bf5-baf2-4164-9844-c1f3e771402b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "46e82bf5-baf2-4164-9844-c1f3e771402b" (UID: "46e82bf5-baf2-4164-9844-c1f3e771402b"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:09:53 crc kubenswrapper[4576]: I1203 09:09:53.313304 4576 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46e82bf5-baf2-4164-9844-c1f3e771402b-config\") on node \"crc\" DevicePath \"\"" Dec 03 09:09:53 crc kubenswrapper[4576]: I1203 09:09:53.313334 4576 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/46e82bf5-baf2-4164-9844-c1f3e771402b-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 09:09:53 crc kubenswrapper[4576]: I1203 09:09:53.313344 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hnp2h\" (UniqueName: \"kubernetes.io/projected/46e82bf5-baf2-4164-9844-c1f3e771402b-kube-api-access-hnp2h\") on node \"crc\" DevicePath \"\"" Dec 03 09:09:53 crc kubenswrapper[4576]: I1203 09:09:53.560291 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 09:09:53 crc kubenswrapper[4576]: W1203 09:09:53.580996 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc7c148cb_508f_45ed_a5ea_06b0b4bc51ff.slice/crio-36439799996c8f969642f3c9f307f128ec4c7b08c452583198bdf260bf59c3b4 WatchSource:0}: Error finding container 36439799996c8f969642f3c9f307f128ec4c7b08c452583198bdf260bf59c3b4: Status 404 returned error can't find the container with id 36439799996c8f969642f3c9f307f128ec4c7b08c452583198bdf260bf59c3b4 Dec 03 09:09:53 crc kubenswrapper[4576]: I1203 09:09:53.581390 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 03 09:09:53 crc kubenswrapper[4576]: I1203 09:09:53.747453 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-25phb" event={"ID":"1f709485-8dc6-4e99-ba88-880d491fca2e","Type":"ContainerStarted","Data":"28da0d419216bf011cd0d06c4569c82164a8129a289afb3d6a0837823dd11c9d"} Dec 03 09:09:53 crc kubenswrapper[4576]: I1203 09:09:53.758840 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"c7c148cb-508f-45ed-a5ea-06b0b4bc51ff","Type":"ContainerStarted","Data":"36439799996c8f969642f3c9f307f128ec4c7b08c452583198bdf260bf59c3b4"} Dec 03 09:09:53 crc kubenswrapper[4576]: I1203 09:09:53.763998 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"4dce317b-36ba-47d9-9175-a50ed2bf038b","Type":"ContainerStarted","Data":"64fde3d445c0c1634611eec4bae7f48eace841a8052e812d5c0049dcbe70c3cf"} Dec 03 09:09:53 crc kubenswrapper[4576]: I1203 09:09:53.766428 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-xgjxj" Dec 03 09:09:53 crc kubenswrapper[4576]: I1203 09:09:53.766493 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-xgjxj" event={"ID":"46e82bf5-baf2-4164-9844-c1f3e771402b","Type":"ContainerDied","Data":"adb41fea1e6221b7da7740ce09626ca9ca9ca6bf285fd0aa7bab38a8dbf20461"} Dec 03 09:09:53 crc kubenswrapper[4576]: E1203 09:09:53.767141 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-57d769cc4f-gjhtr" podUID="a2e06f57-3350-4eae-9f9b-d544fa698861" Dec 03 09:09:53 crc kubenswrapper[4576]: E1203 09:09:53.775973 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-666b6646f7-q744m" podUID="382f950f-abff-4951-a3d1-d1fdbae63e30" Dec 03 09:09:53 crc kubenswrapper[4576]: I1203 09:09:53.785361 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 03 09:09:53 crc kubenswrapper[4576]: I1203 09:09:53.863178 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 03 09:09:54 crc kubenswrapper[4576]: I1203 09:09:54.007094 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-xgjxj"] Dec 03 09:09:54 crc kubenswrapper[4576]: I1203 09:09:54.027963 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-xgjxj"] Dec 03 09:09:54 crc kubenswrapper[4576]: I1203 09:09:54.080161 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-pjp95"] Dec 03 09:09:54 crc kubenswrapper[4576]: I1203 09:09:54.313170 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-5vtbx" Dec 03 09:09:54 crc kubenswrapper[4576]: I1203 09:09:54.469792 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ttqv\" (UniqueName: \"kubernetes.io/projected/08c0ad6c-3b00-4cb0-901c-b4ce0fb496cd-kube-api-access-6ttqv\") pod \"08c0ad6c-3b00-4cb0-901c-b4ce0fb496cd\" (UID: \"08c0ad6c-3b00-4cb0-901c-b4ce0fb496cd\") " Dec 03 09:09:54 crc kubenswrapper[4576]: I1203 09:09:54.470281 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/08c0ad6c-3b00-4cb0-901c-b4ce0fb496cd-config\") pod \"08c0ad6c-3b00-4cb0-901c-b4ce0fb496cd\" (UID: \"08c0ad6c-3b00-4cb0-901c-b4ce0fb496cd\") " Dec 03 09:09:54 crc kubenswrapper[4576]: I1203 09:09:54.470757 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/08c0ad6c-3b00-4cb0-901c-b4ce0fb496cd-config" (OuterVolumeSpecName: "config") pod "08c0ad6c-3b00-4cb0-901c-b4ce0fb496cd" (UID: "08c0ad6c-3b00-4cb0-901c-b4ce0fb496cd"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:09:54 crc kubenswrapper[4576]: I1203 09:09:54.471342 4576 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/08c0ad6c-3b00-4cb0-901c-b4ce0fb496cd-config\") on node \"crc\" DevicePath \"\"" Dec 03 09:09:54 crc kubenswrapper[4576]: I1203 09:09:54.490211 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08c0ad6c-3b00-4cb0-901c-b4ce0fb496cd-kube-api-access-6ttqv" (OuterVolumeSpecName: "kube-api-access-6ttqv") pod "08c0ad6c-3b00-4cb0-901c-b4ce0fb496cd" (UID: "08c0ad6c-3b00-4cb0-901c-b4ce0fb496cd"). InnerVolumeSpecName "kube-api-access-6ttqv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:09:54 crc kubenswrapper[4576]: I1203 09:09:54.574600 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ttqv\" (UniqueName: \"kubernetes.io/projected/08c0ad6c-3b00-4cb0-901c-b4ce0fb496cd-kube-api-access-6ttqv\") on node \"crc\" DevicePath \"\"" Dec 03 09:09:54 crc kubenswrapper[4576]: I1203 09:09:54.765708 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 03 09:09:54 crc kubenswrapper[4576]: I1203 09:09:54.775184 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-5vtbx" event={"ID":"08c0ad6c-3b00-4cb0-901c-b4ce0fb496cd","Type":"ContainerDied","Data":"bf8b76d1354739b07f1c496d8178d5e8b2bdaf4acb6d6f016c3ef99ac8ad4aab"} Dec 03 09:09:54 crc kubenswrapper[4576]: I1203 09:09:54.775279 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-5vtbx" Dec 03 09:09:54 crc kubenswrapper[4576]: I1203 09:09:54.784358 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"288e65b5-6608-4063-9996-eb5180ffbf0e","Type":"ContainerStarted","Data":"e91262a2264d1750a82ac7f4b4f940af167929b1ff38408c6966f07ce00aa46b"} Dec 03 09:09:54 crc kubenswrapper[4576]: I1203 09:09:54.785900 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"83513275-c7cc-450e-9bca-79ca295b7906","Type":"ContainerStarted","Data":"a2365bee5173834d0a4e11430aa3e32e8979d70b4f806f4c055cddb46e425a66"} Dec 03 09:09:54 crc kubenswrapper[4576]: I1203 09:09:54.790294 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-pjp95" event={"ID":"310fba87-b39f-4613-a373-54ecd21ed629","Type":"ContainerStarted","Data":"de514968b6fce24181a9981a9c813cfebaf500d44d27f09557d73b1cf8b5aaca"} Dec 03 09:09:54 crc kubenswrapper[4576]: I1203 09:09:54.792328 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"6d6451b7-d2df-487c-afa3-3f1e56758ce4","Type":"ContainerStarted","Data":"20c7cdb1b03745675354ae5189269b69daad4f5c2b1e25e038f123970827d1b8"} Dec 03 09:09:54 crc kubenswrapper[4576]: I1203 09:09:54.871006 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-5vtbx"] Dec 03 09:09:54 crc kubenswrapper[4576]: I1203 09:09:54.881842 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-5vtbx"] Dec 03 09:09:54 crc kubenswrapper[4576]: I1203 09:09:54.885741 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 03 09:09:55 crc kubenswrapper[4576]: I1203 09:09:55.691581 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="08c0ad6c-3b00-4cb0-901c-b4ce0fb496cd" path="/var/lib/kubelet/pods/08c0ad6c-3b00-4cb0-901c-b4ce0fb496cd/volumes" Dec 03 09:09:55 crc kubenswrapper[4576]: I1203 09:09:55.692275 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46e82bf5-baf2-4164-9844-c1f3e771402b" path="/var/lib/kubelet/pods/46e82bf5-baf2-4164-9844-c1f3e771402b/volumes" Dec 03 09:09:55 crc kubenswrapper[4576]: I1203 09:09:55.813070 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"3a2bee80-63d4-41ae-97fc-54a96c4afc6e","Type":"ContainerStarted","Data":"17336c92ddc78bf3a2dcdf300c8fa74ad698c68185b03e00f8096f0ff9c7544c"} Dec 03 09:09:55 crc kubenswrapper[4576]: I1203 09:09:55.824357 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"64d45188-c499-4626-bdd3-6f54a0ed3f14","Type":"ContainerStarted","Data":"cec3b36b8ee1b85ebf109ad693d80c6c779abae5d3e384ddb8e54ac28c83958b"} Dec 03 09:09:55 crc kubenswrapper[4576]: I1203 09:09:55.829048 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c20e6200-091a-47c3-afef-d1b4d9538309","Type":"ContainerStarted","Data":"35f9490152a4739453e519a118067b6d218f1ac074563732fa6b9864d8353092"} Dec 03 09:10:02 crc kubenswrapper[4576]: I1203 09:10:02.894046 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"83513275-c7cc-450e-9bca-79ca295b7906","Type":"ContainerStarted","Data":"ff515e45fe2a80798a3b419ba3bbff1dab50c670c511fed9a684ac731dcaba01"} Dec 03 09:10:02 crc kubenswrapper[4576]: I1203 09:10:02.896378 4576 generic.go:334] "Generic (PLEG): container finished" podID="310fba87-b39f-4613-a373-54ecd21ed629" containerID="39e45123b73197525bf6c6d1fb6c6a61b43effbc31cba7bedd5f2638b3e8a2f3" exitCode=0 Dec 03 09:10:02 crc kubenswrapper[4576]: I1203 09:10:02.896652 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-pjp95" event={"ID":"310fba87-b39f-4613-a373-54ecd21ed629","Type":"ContainerDied","Data":"39e45123b73197525bf6c6d1fb6c6a61b43effbc31cba7bedd5f2638b3e8a2f3"} Dec 03 09:10:02 crc kubenswrapper[4576]: I1203 09:10:02.899059 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"3a2bee80-63d4-41ae-97fc-54a96c4afc6e","Type":"ContainerStarted","Data":"1c8363785ed42c47942afa054f1076aaddcd2bb506047d31367d54c389a3be17"} Dec 03 09:10:02 crc kubenswrapper[4576]: I1203 09:10:02.902343 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-25phb" event={"ID":"1f709485-8dc6-4e99-ba88-880d491fca2e","Type":"ContainerStarted","Data":"1b9aea27c3075d35c291c541ca7ec859ae947fd2fb97f5dd8388f1757339c333"} Dec 03 09:10:02 crc kubenswrapper[4576]: I1203 09:10:02.902496 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-25phb" Dec 03 09:10:02 crc kubenswrapper[4576]: I1203 09:10:02.905095 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"64d45188-c499-4626-bdd3-6f54a0ed3f14","Type":"ContainerStarted","Data":"d85508dff0ead789b0f965566bd57b1ab91cfedc67205ca9f88bbef2e16378bd"} Dec 03 09:10:02 crc kubenswrapper[4576]: I1203 09:10:02.907140 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"288e65b5-6608-4063-9996-eb5180ffbf0e","Type":"ContainerStarted","Data":"8e81648e3f0f9adc56deaf2b2821c0249cf2f552d31488ed315988a5a350ce7c"} Dec 03 
09:10:02 crc kubenswrapper[4576]: I1203 09:10:02.908979 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"c7c148cb-508f-45ed-a5ea-06b0b4bc51ff","Type":"ContainerStarted","Data":"b3d0723a6943a9d0a9aa939f3d5b1d72fcc02de0ec71ca85405c904a33e01e05"} Dec 03 09:10:02 crc kubenswrapper[4576]: I1203 09:10:02.909110 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Dec 03 09:10:02 crc kubenswrapper[4576]: I1203 09:10:02.910837 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"4dce317b-36ba-47d9-9175-a50ed2bf038b","Type":"ContainerStarted","Data":"c7dda649a634af571e69f7c7a1613f11b15679886c666c57073df471b595e306"} Dec 03 09:10:02 crc kubenswrapper[4576]: I1203 09:10:02.911486 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 03 09:10:02 crc kubenswrapper[4576]: I1203 09:10:02.964597 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-25phb" podStartSLOduration=16.433202852 podStartE2EDuration="24.964565562s" podCreationTimestamp="2025-12-03 09:09:38 +0000 UTC" firstStartedPulling="2025-12-03 09:09:52.975067701 +0000 UTC m=+1800.361044685" lastFinishedPulling="2025-12-03 09:10:01.506430411 +0000 UTC m=+1808.892407395" observedRunningTime="2025-12-03 09:10:02.961689403 +0000 UTC m=+1810.347666387" watchObservedRunningTime="2025-12-03 09:10:02.964565562 +0000 UTC m=+1810.350542546" Dec 03 09:10:03 crc kubenswrapper[4576]: I1203 09:10:03.016169 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=22.479557813 podStartE2EDuration="30.016145485s" podCreationTimestamp="2025-12-03 09:09:33 +0000 UTC" firstStartedPulling="2025-12-03 09:09:53.583735083 +0000 UTC m=+1800.969712067" lastFinishedPulling="2025-12-03 09:10:01.120322755 +0000 UTC m=+1808.506299739" observedRunningTime="2025-12-03 09:10:03.010791438 +0000 UTC m=+1810.396768422" watchObservedRunningTime="2025-12-03 09:10:03.016145485 +0000 UTC m=+1810.402122469" Dec 03 09:10:03 crc kubenswrapper[4576]: I1203 09:10:03.034472 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=19.511731802 podStartE2EDuration="28.034443936s" podCreationTimestamp="2025-12-03 09:09:35 +0000 UTC" firstStartedPulling="2025-12-03 09:09:53.573643706 +0000 UTC m=+1800.959620690" lastFinishedPulling="2025-12-03 09:10:02.09635583 +0000 UTC m=+1809.482332824" observedRunningTime="2025-12-03 09:10:03.029518932 +0000 UTC m=+1810.415495916" watchObservedRunningTime="2025-12-03 09:10:03.034443936 +0000 UTC m=+1810.420420930" Dec 03 09:10:03 crc kubenswrapper[4576]: I1203 09:10:03.919730 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-pjp95" event={"ID":"310fba87-b39f-4613-a373-54ecd21ed629","Type":"ContainerStarted","Data":"300574f3d7e5cfe9cafcb968d7002503b1e4c1d367205f1b089296beb4453f13"} Dec 03 09:10:03 crc kubenswrapper[4576]: I1203 09:10:03.920054 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-pjp95" event={"ID":"310fba87-b39f-4613-a373-54ecd21ed629","Type":"ContainerStarted","Data":"f20e088f461fc4504767a9f4144a2b5aba14b9dab74c8403bba9a1766737c3f5"} Dec 03 09:10:03 crc kubenswrapper[4576]: I1203 09:10:03.946030 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/ovn-controller-ovs-pjp95" podStartSLOduration=18.534253983 podStartE2EDuration="25.946002615s" podCreationTimestamp="2025-12-03 09:09:38 +0000 UTC" firstStartedPulling="2025-12-03 09:09:54.094871644 +0000 UTC m=+1801.480848628" lastFinishedPulling="2025-12-03 09:10:01.506620276 +0000 UTC m=+1808.892597260" observedRunningTime="2025-12-03 09:10:03.943148697 +0000 UTC m=+1811.329125691" watchObservedRunningTime="2025-12-03 09:10:03.946002615 +0000 UTC m=+1811.331979609" Dec 03 09:10:04 crc kubenswrapper[4576]: I1203 09:10:04.930136 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-pjp95" Dec 03 09:10:04 crc kubenswrapper[4576]: I1203 09:10:04.930602 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-pjp95" Dec 03 09:10:05 crc kubenswrapper[4576]: I1203 09:10:05.940755 4576 generic.go:334] "Generic (PLEG): container finished" podID="83513275-c7cc-450e-9bca-79ca295b7906" containerID="ff515e45fe2a80798a3b419ba3bbff1dab50c670c511fed9a684ac731dcaba01" exitCode=0 Dec 03 09:10:05 crc kubenswrapper[4576]: I1203 09:10:05.940842 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"83513275-c7cc-450e-9bca-79ca295b7906","Type":"ContainerDied","Data":"ff515e45fe2a80798a3b419ba3bbff1dab50c670c511fed9a684ac731dcaba01"} Dec 03 09:10:06 crc kubenswrapper[4576]: I1203 09:10:06.677420 4576 scope.go:117] "RemoveContainer" containerID="971057ff9d8dd948ea2baec3fb0fe13004ef1bdcb132acd7f101f25e8ebc2c91" Dec 03 09:10:06 crc kubenswrapper[4576]: E1203 09:10:06.677744 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:10:06 crc kubenswrapper[4576]: I1203 09:10:06.951695 4576 generic.go:334] "Generic (PLEG): container finished" podID="288e65b5-6608-4063-9996-eb5180ffbf0e" containerID="8e81648e3f0f9adc56deaf2b2821c0249cf2f552d31488ed315988a5a350ce7c" exitCode=0 Dec 03 09:10:06 crc kubenswrapper[4576]: I1203 09:10:06.951744 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"288e65b5-6608-4063-9996-eb5180ffbf0e","Type":"ContainerDied","Data":"8e81648e3f0f9adc56deaf2b2821c0249cf2f552d31488ed315988a5a350ce7c"} Dec 03 09:10:07 crc kubenswrapper[4576]: I1203 09:10:07.970134 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"83513275-c7cc-450e-9bca-79ca295b7906","Type":"ContainerStarted","Data":"bd05d5063b33ca1995a1b73f8d75575b6dea363f7ba91d68680e0ab9dc33a947"} Dec 03 09:10:07 crc kubenswrapper[4576]: I1203 09:10:07.973089 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"3a2bee80-63d4-41ae-97fc-54a96c4afc6e","Type":"ContainerStarted","Data":"584b7b272561696c4ca01740f4d87474cf84979a575ec75453ca4c5756f51450"} Dec 03 09:10:07 crc kubenswrapper[4576]: I1203 09:10:07.976321 4576 generic.go:334] "Generic (PLEG): container finished" podID="a2e06f57-3350-4eae-9f9b-d544fa698861" containerID="27f260bc6d0a51714d0e208762d991a510ae8bb3464731e36c3c801d3f6ed833" exitCode=0 Dec 03 09:10:07 crc kubenswrapper[4576]: I1203 
09:10:07.976405 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-gjhtr" event={"ID":"a2e06f57-3350-4eae-9f9b-d544fa698861","Type":"ContainerDied","Data":"27f260bc6d0a51714d0e208762d991a510ae8bb3464731e36c3c801d3f6ed833"} Dec 03 09:10:07 crc kubenswrapper[4576]: I1203 09:10:07.978255 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"64d45188-c499-4626-bdd3-6f54a0ed3f14","Type":"ContainerStarted","Data":"485f884edee469d52c7353f8a50c41a99a7174615ca2aa4c844b940cfa885d6a"} Dec 03 09:10:07 crc kubenswrapper[4576]: I1203 09:10:07.985908 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"288e65b5-6608-4063-9996-eb5180ffbf0e","Type":"ContainerStarted","Data":"4b8308b00871ddce4d30c7e6e40cd89895782b22d609d142593e17106eec52cd"} Dec 03 09:10:07 crc kubenswrapper[4576]: I1203 09:10:07.989574 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-q744m" event={"ID":"382f950f-abff-4951-a3d1-d1fdbae63e30","Type":"ContainerDied","Data":"12842b7504adc88b5a3d8946287877b7b97f0713d634f10410c796f4537e041e"} Dec 03 09:10:07 crc kubenswrapper[4576]: I1203 09:10:07.989501 4576 generic.go:334] "Generic (PLEG): container finished" podID="382f950f-abff-4951-a3d1-d1fdbae63e30" containerID="12842b7504adc88b5a3d8946287877b7b97f0713d634f10410c796f4537e041e" exitCode=0 Dec 03 09:10:08 crc kubenswrapper[4576]: I1203 09:10:08.021417 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Dec 03 09:10:08 crc kubenswrapper[4576]: I1203 09:10:08.037126 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=29.467485222 podStartE2EDuration="37.037093858s" podCreationTimestamp="2025-12-03 09:09:31 +0000 UTC" firstStartedPulling="2025-12-03 09:09:53.780625176 +0000 UTC m=+1801.166602160" lastFinishedPulling="2025-12-03 09:10:01.350233812 +0000 UTC m=+1808.736210796" observedRunningTime="2025-12-03 09:10:07.999953981 +0000 UTC m=+1815.385931015" watchObservedRunningTime="2025-12-03 09:10:08.037093858 +0000 UTC m=+1815.423070842" Dec 03 09:10:08 crc kubenswrapper[4576]: I1203 09:10:08.058923 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=19.647156405 podStartE2EDuration="31.058898766s" podCreationTimestamp="2025-12-03 09:09:37 +0000 UTC" firstStartedPulling="2025-12-03 09:09:55.793401919 +0000 UTC m=+1803.179378903" lastFinishedPulling="2025-12-03 09:10:07.20514428 +0000 UTC m=+1814.591121264" observedRunningTime="2025-12-03 09:10:08.050493916 +0000 UTC m=+1815.436470920" watchObservedRunningTime="2025-12-03 09:10:08.058898766 +0000 UTC m=+1815.444875770" Dec 03 09:10:08 crc kubenswrapper[4576]: I1203 09:10:08.109021 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=30.032738793 podStartE2EDuration="38.108991877s" podCreationTimestamp="2025-12-03 09:09:30 +0000 UTC" firstStartedPulling="2025-12-03 09:09:53.923330645 +0000 UTC m=+1801.309307629" lastFinishedPulling="2025-12-03 09:10:01.999583729 +0000 UTC m=+1809.385560713" observedRunningTime="2025-12-03 09:10:08.093171954 +0000 UTC m=+1815.479148938" watchObservedRunningTime="2025-12-03 09:10:08.108991877 +0000 UTC m=+1815.494968881" Dec 03 09:10:08 crc kubenswrapper[4576]: I1203 09:10:08.135451 4576 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=15.703204041 podStartE2EDuration="27.135429042s" podCreationTimestamp="2025-12-03 09:09:41 +0000 UTC" firstStartedPulling="2025-12-03 09:09:55.778911703 +0000 UTC m=+1803.164888687" lastFinishedPulling="2025-12-03 09:10:07.211136704 +0000 UTC m=+1814.597113688" observedRunningTime="2025-12-03 09:10:08.121808859 +0000 UTC m=+1815.507785833" watchObservedRunningTime="2025-12-03 09:10:08.135429042 +0000 UTC m=+1815.521406026" Dec 03 09:10:08 crc kubenswrapper[4576]: I1203 09:10:08.593999 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Dec 03 09:10:08 crc kubenswrapper[4576]: I1203 09:10:08.998902 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-q744m" event={"ID":"382f950f-abff-4951-a3d1-d1fdbae63e30","Type":"ContainerStarted","Data":"7fbb43d7ccd2e9d609a194a1f4f262ea418cb904d1ed59b11ea9b9ffaba89bc3"} Dec 03 09:10:08 crc kubenswrapper[4576]: I1203 09:10:08.999118 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-666b6646f7-q744m" Dec 03 09:10:09 crc kubenswrapper[4576]: I1203 09:10:09.001065 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-gjhtr" event={"ID":"a2e06f57-3350-4eae-9f9b-d544fa698861","Type":"ContainerStarted","Data":"d83336951f8c8abb2cdf88c6f7dc3b906f0da3d48a7615633db70c9611ffde85"} Dec 03 09:10:09 crc kubenswrapper[4576]: I1203 09:10:09.001488 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57d769cc4f-gjhtr" Dec 03 09:10:09 crc kubenswrapper[4576]: I1203 09:10:09.017900 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-666b6646f7-q744m" podStartSLOduration=-9223371995.836914 podStartE2EDuration="41.017862374s" podCreationTimestamp="2025-12-03 09:09:28 +0000 UTC" firstStartedPulling="2025-12-03 09:09:29.948173562 +0000 UTC m=+1777.334150546" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:10:09.015192791 +0000 UTC m=+1816.401169785" watchObservedRunningTime="2025-12-03 09:10:09.017862374 +0000 UTC m=+1816.403839348" Dec 03 09:10:09 crc kubenswrapper[4576]: I1203 09:10:09.039942 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57d769cc4f-gjhtr" podStartSLOduration=2.8359029 podStartE2EDuration="40.039919827s" podCreationTimestamp="2025-12-03 09:09:29 +0000 UTC" firstStartedPulling="2025-12-03 09:09:30.021463569 +0000 UTC m=+1777.407440553" lastFinishedPulling="2025-12-03 09:10:07.225480496 +0000 UTC m=+1814.611457480" observedRunningTime="2025-12-03 09:10:09.036972247 +0000 UTC m=+1816.422949221" watchObservedRunningTime="2025-12-03 09:10:09.039919827 +0000 UTC m=+1816.425896811" Dec 03 09:10:09 crc kubenswrapper[4576]: I1203 09:10:09.270107 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Dec 03 09:10:09 crc kubenswrapper[4576]: I1203 09:10:09.270178 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Dec 03 09:10:09 crc kubenswrapper[4576]: I1203 09:10:09.323551 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.022005 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openstack/ovsdbserver-sb-0" Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.046820 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.059291 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.315683 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-gjhtr"] Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.391841 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-bj2tg"] Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.393477 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-bj2tg" Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.396190 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.414355 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-bj2tg"] Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.525907 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f56f17c-5be1-457f-84ea-56ba8f1c632c-config\") pod \"dnsmasq-dns-5bf47b49b7-bj2tg\" (UID: \"9f56f17c-5be1-457f-84ea-56ba8f1c632c\") " pod="openstack/dnsmasq-dns-5bf47b49b7-bj2tg" Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.525992 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7cvd6\" (UniqueName: \"kubernetes.io/projected/9f56f17c-5be1-457f-84ea-56ba8f1c632c-kube-api-access-7cvd6\") pod \"dnsmasq-dns-5bf47b49b7-bj2tg\" (UID: \"9f56f17c-5be1-457f-84ea-56ba8f1c632c\") " pod="openstack/dnsmasq-dns-5bf47b49b7-bj2tg" Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.526021 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f56f17c-5be1-457f-84ea-56ba8f1c632c-dns-svc\") pod \"dnsmasq-dns-5bf47b49b7-bj2tg\" (UID: \"9f56f17c-5be1-457f-84ea-56ba8f1c632c\") " pod="openstack/dnsmasq-dns-5bf47b49b7-bj2tg" Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.526264 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9f56f17c-5be1-457f-84ea-56ba8f1c632c-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf47b49b7-bj2tg\" (UID: \"9f56f17c-5be1-457f-84ea-56ba8f1c632c\") " pod="openstack/dnsmasq-dns-5bf47b49b7-bj2tg" Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.576291 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-lxvbj"] Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.578065 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-lxvbj" Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.585730 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.592252 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-lxvbj"] Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.627876 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7cvd6\" (UniqueName: \"kubernetes.io/projected/9f56f17c-5be1-457f-84ea-56ba8f1c632c-kube-api-access-7cvd6\") pod \"dnsmasq-dns-5bf47b49b7-bj2tg\" (UID: \"9f56f17c-5be1-457f-84ea-56ba8f1c632c\") " pod="openstack/dnsmasq-dns-5bf47b49b7-bj2tg" Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.627947 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f56f17c-5be1-457f-84ea-56ba8f1c632c-dns-svc\") pod \"dnsmasq-dns-5bf47b49b7-bj2tg\" (UID: \"9f56f17c-5be1-457f-84ea-56ba8f1c632c\") " pod="openstack/dnsmasq-dns-5bf47b49b7-bj2tg" Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.628022 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9f56f17c-5be1-457f-84ea-56ba8f1c632c-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf47b49b7-bj2tg\" (UID: \"9f56f17c-5be1-457f-84ea-56ba8f1c632c\") " pod="openstack/dnsmasq-dns-5bf47b49b7-bj2tg" Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.628097 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f56f17c-5be1-457f-84ea-56ba8f1c632c-config\") pod \"dnsmasq-dns-5bf47b49b7-bj2tg\" (UID: \"9f56f17c-5be1-457f-84ea-56ba8f1c632c\") " pod="openstack/dnsmasq-dns-5bf47b49b7-bj2tg" Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.629165 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f56f17c-5be1-457f-84ea-56ba8f1c632c-config\") pod \"dnsmasq-dns-5bf47b49b7-bj2tg\" (UID: \"9f56f17c-5be1-457f-84ea-56ba8f1c632c\") " pod="openstack/dnsmasq-dns-5bf47b49b7-bj2tg" Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.629447 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f56f17c-5be1-457f-84ea-56ba8f1c632c-dns-svc\") pod \"dnsmasq-dns-5bf47b49b7-bj2tg\" (UID: \"9f56f17c-5be1-457f-84ea-56ba8f1c632c\") " pod="openstack/dnsmasq-dns-5bf47b49b7-bj2tg" Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.629866 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9f56f17c-5be1-457f-84ea-56ba8f1c632c-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf47b49b7-bj2tg\" (UID: \"9f56f17c-5be1-457f-84ea-56ba8f1c632c\") " pod="openstack/dnsmasq-dns-5bf47b49b7-bj2tg" Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.666773 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7cvd6\" (UniqueName: \"kubernetes.io/projected/9f56f17c-5be1-457f-84ea-56ba8f1c632c-kube-api-access-7cvd6\") pod \"dnsmasq-dns-5bf47b49b7-bj2tg\" (UID: \"9f56f17c-5be1-457f-84ea-56ba8f1c632c\") " pod="openstack/dnsmasq-dns-5bf47b49b7-bj2tg" Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.720474 4576 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-bj2tg" Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.728933 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/ac988c47-bfaa-4142-a15b-6c69acd494df-ovn-rundir\") pod \"ovn-controller-metrics-lxvbj\" (UID: \"ac988c47-bfaa-4142-a15b-6c69acd494df\") " pod="openstack/ovn-controller-metrics-lxvbj" Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.728981 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac988c47-bfaa-4142-a15b-6c69acd494df-combined-ca-bundle\") pod \"ovn-controller-metrics-lxvbj\" (UID: \"ac988c47-bfaa-4142-a15b-6c69acd494df\") " pod="openstack/ovn-controller-metrics-lxvbj" Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.729016 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac988c47-bfaa-4142-a15b-6c69acd494df-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-lxvbj\" (UID: \"ac988c47-bfaa-4142-a15b-6c69acd494df\") " pod="openstack/ovn-controller-metrics-lxvbj" Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.729031 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/ac988c47-bfaa-4142-a15b-6c69acd494df-ovs-rundir\") pod \"ovn-controller-metrics-lxvbj\" (UID: \"ac988c47-bfaa-4142-a15b-6c69acd494df\") " pod="openstack/ovn-controller-metrics-lxvbj" Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.729110 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac988c47-bfaa-4142-a15b-6c69acd494df-config\") pod \"ovn-controller-metrics-lxvbj\" (UID: \"ac988c47-bfaa-4142-a15b-6c69acd494df\") " pod="openstack/ovn-controller-metrics-lxvbj" Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.729132 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nhjgs\" (UniqueName: \"kubernetes.io/projected/ac988c47-bfaa-4142-a15b-6c69acd494df-kube-api-access-nhjgs\") pod \"ovn-controller-metrics-lxvbj\" (UID: \"ac988c47-bfaa-4142-a15b-6c69acd494df\") " pod="openstack/ovn-controller-metrics-lxvbj" Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.831800 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac988c47-bfaa-4142-a15b-6c69acd494df-config\") pod \"ovn-controller-metrics-lxvbj\" (UID: \"ac988c47-bfaa-4142-a15b-6c69acd494df\") " pod="openstack/ovn-controller-metrics-lxvbj" Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.831871 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nhjgs\" (UniqueName: \"kubernetes.io/projected/ac988c47-bfaa-4142-a15b-6c69acd494df-kube-api-access-nhjgs\") pod \"ovn-controller-metrics-lxvbj\" (UID: \"ac988c47-bfaa-4142-a15b-6c69acd494df\") " pod="openstack/ovn-controller-metrics-lxvbj" Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.831924 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: 
\"kubernetes.io/host-path/ac988c47-bfaa-4142-a15b-6c69acd494df-ovn-rundir\") pod \"ovn-controller-metrics-lxvbj\" (UID: \"ac988c47-bfaa-4142-a15b-6c69acd494df\") " pod="openstack/ovn-controller-metrics-lxvbj" Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.831955 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac988c47-bfaa-4142-a15b-6c69acd494df-combined-ca-bundle\") pod \"ovn-controller-metrics-lxvbj\" (UID: \"ac988c47-bfaa-4142-a15b-6c69acd494df\") " pod="openstack/ovn-controller-metrics-lxvbj" Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.832009 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/ac988c47-bfaa-4142-a15b-6c69acd494df-ovs-rundir\") pod \"ovn-controller-metrics-lxvbj\" (UID: \"ac988c47-bfaa-4142-a15b-6c69acd494df\") " pod="openstack/ovn-controller-metrics-lxvbj" Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.832034 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac988c47-bfaa-4142-a15b-6c69acd494df-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-lxvbj\" (UID: \"ac988c47-bfaa-4142-a15b-6c69acd494df\") " pod="openstack/ovn-controller-metrics-lxvbj" Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.832344 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/ac988c47-bfaa-4142-a15b-6c69acd494df-ovn-rundir\") pod \"ovn-controller-metrics-lxvbj\" (UID: \"ac988c47-bfaa-4142-a15b-6c69acd494df\") " pod="openstack/ovn-controller-metrics-lxvbj" Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.832343 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/ac988c47-bfaa-4142-a15b-6c69acd494df-ovs-rundir\") pod \"ovn-controller-metrics-lxvbj\" (UID: \"ac988c47-bfaa-4142-a15b-6c69acd494df\") " pod="openstack/ovn-controller-metrics-lxvbj" Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.833545 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac988c47-bfaa-4142-a15b-6c69acd494df-config\") pod \"ovn-controller-metrics-lxvbj\" (UID: \"ac988c47-bfaa-4142-a15b-6c69acd494df\") " pod="openstack/ovn-controller-metrics-lxvbj" Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.844437 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac988c47-bfaa-4142-a15b-6c69acd494df-combined-ca-bundle\") pod \"ovn-controller-metrics-lxvbj\" (UID: \"ac988c47-bfaa-4142-a15b-6c69acd494df\") " pod="openstack/ovn-controller-metrics-lxvbj" Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.847153 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac988c47-bfaa-4142-a15b-6c69acd494df-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-lxvbj\" (UID: \"ac988c47-bfaa-4142-a15b-6c69acd494df\") " pod="openstack/ovn-controller-metrics-lxvbj" Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.864760 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-q744m"] Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.864901 4576 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-nhjgs\" (UniqueName: \"kubernetes.io/projected/ac988c47-bfaa-4142-a15b-6c69acd494df-kube-api-access-nhjgs\") pod \"ovn-controller-metrics-lxvbj\" (UID: \"ac988c47-bfaa-4142-a15b-6c69acd494df\") " pod="openstack/ovn-controller-metrics-lxvbj" Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.899205 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-lxvbj" Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.902608 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8554648995-rl6dz"] Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.904067 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-rl6dz" Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.914160 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Dec 03 09:10:10 crc kubenswrapper[4576]: I1203 09:10:10.921855 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-rl6dz"] Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.032320 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57d769cc4f-gjhtr" podUID="a2e06f57-3350-4eae-9f9b-d544fa698861" containerName="dnsmasq-dns" containerID="cri-o://d83336951f8c8abb2cdf88c6f7dc3b906f0da3d48a7615633db70c9611ffde85" gracePeriod=10 Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.034653 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-666b6646f7-q744m" podUID="382f950f-abff-4951-a3d1-d1fdbae63e30" containerName="dnsmasq-dns" containerID="cri-o://7fbb43d7ccd2e9d609a194a1f4f262ea418cb904d1ed59b11ea9b9ffaba89bc3" gracePeriod=10 Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.035126 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64ba7793-75a1-4629-878c-9c39ed432ecc-config\") pod \"dnsmasq-dns-8554648995-rl6dz\" (UID: \"64ba7793-75a1-4629-878c-9c39ed432ecc\") " pod="openstack/dnsmasq-dns-8554648995-rl6dz" Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.035326 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zkqlf\" (UniqueName: \"kubernetes.io/projected/64ba7793-75a1-4629-878c-9c39ed432ecc-kube-api-access-zkqlf\") pod \"dnsmasq-dns-8554648995-rl6dz\" (UID: \"64ba7793-75a1-4629-878c-9c39ed432ecc\") " pod="openstack/dnsmasq-dns-8554648995-rl6dz" Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.036967 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/64ba7793-75a1-4629-878c-9c39ed432ecc-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-rl6dz\" (UID: \"64ba7793-75a1-4629-878c-9c39ed432ecc\") " pod="openstack/dnsmasq-dns-8554648995-rl6dz" Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.036995 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/64ba7793-75a1-4629-878c-9c39ed432ecc-dns-svc\") pod \"dnsmasq-dns-8554648995-rl6dz\" (UID: \"64ba7793-75a1-4629-878c-9c39ed432ecc\") " pod="openstack/dnsmasq-dns-8554648995-rl6dz" Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.037013 
4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/64ba7793-75a1-4629-878c-9c39ed432ecc-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-rl6dz\" (UID: \"64ba7793-75a1-4629-878c-9c39ed432ecc\") " pod="openstack/dnsmasq-dns-8554648995-rl6dz" Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.086317 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.139144 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zkqlf\" (UniqueName: \"kubernetes.io/projected/64ba7793-75a1-4629-878c-9c39ed432ecc-kube-api-access-zkqlf\") pod \"dnsmasq-dns-8554648995-rl6dz\" (UID: \"64ba7793-75a1-4629-878c-9c39ed432ecc\") " pod="openstack/dnsmasq-dns-8554648995-rl6dz" Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.139264 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/64ba7793-75a1-4629-878c-9c39ed432ecc-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-rl6dz\" (UID: \"64ba7793-75a1-4629-878c-9c39ed432ecc\") " pod="openstack/dnsmasq-dns-8554648995-rl6dz" Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.139283 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/64ba7793-75a1-4629-878c-9c39ed432ecc-dns-svc\") pod \"dnsmasq-dns-8554648995-rl6dz\" (UID: \"64ba7793-75a1-4629-878c-9c39ed432ecc\") " pod="openstack/dnsmasq-dns-8554648995-rl6dz" Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.139298 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/64ba7793-75a1-4629-878c-9c39ed432ecc-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-rl6dz\" (UID: \"64ba7793-75a1-4629-878c-9c39ed432ecc\") " pod="openstack/dnsmasq-dns-8554648995-rl6dz" Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.139381 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64ba7793-75a1-4629-878c-9c39ed432ecc-config\") pod \"dnsmasq-dns-8554648995-rl6dz\" (UID: \"64ba7793-75a1-4629-878c-9c39ed432ecc\") " pod="openstack/dnsmasq-dns-8554648995-rl6dz" Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.142930 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/64ba7793-75a1-4629-878c-9c39ed432ecc-dns-svc\") pod \"dnsmasq-dns-8554648995-rl6dz\" (UID: \"64ba7793-75a1-4629-878c-9c39ed432ecc\") " pod="openstack/dnsmasq-dns-8554648995-rl6dz" Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.143025 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/64ba7793-75a1-4629-878c-9c39ed432ecc-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-rl6dz\" (UID: \"64ba7793-75a1-4629-878c-9c39ed432ecc\") " pod="openstack/dnsmasq-dns-8554648995-rl6dz" Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.143140 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/64ba7793-75a1-4629-878c-9c39ed432ecc-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-rl6dz\" (UID: \"64ba7793-75a1-4629-878c-9c39ed432ecc\") " 
pod="openstack/dnsmasq-dns-8554648995-rl6dz" Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.143561 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64ba7793-75a1-4629-878c-9c39ed432ecc-config\") pod \"dnsmasq-dns-8554648995-rl6dz\" (UID: \"64ba7793-75a1-4629-878c-9c39ed432ecc\") " pod="openstack/dnsmasq-dns-8554648995-rl6dz" Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.212157 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zkqlf\" (UniqueName: \"kubernetes.io/projected/64ba7793-75a1-4629-878c-9c39ed432ecc-kube-api-access-zkqlf\") pod \"dnsmasq-dns-8554648995-rl6dz\" (UID: \"64ba7793-75a1-4629-878c-9c39ed432ecc\") " pod="openstack/dnsmasq-dns-8554648995-rl6dz" Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.234003 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-rl6dz" Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.300912 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-lxvbj"] Dec 03 09:10:11 crc kubenswrapper[4576]: W1203 09:10:11.316235 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podac988c47_bfaa_4142_a15b_6c69acd494df.slice/crio-cba05ec5a793e8859a227e0e9f219ba12b0129c1b1f3144604b3a983ddcfab06 WatchSource:0}: Error finding container cba05ec5a793e8859a227e0e9f219ba12b0129c1b1f3144604b3a983ddcfab06: Status 404 returned error can't find the container with id cba05ec5a793e8859a227e0e9f219ba12b0129c1b1f3144604b3a983ddcfab06 Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.404402 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-bj2tg"] Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.623744 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.626619 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.629125 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.629440 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-bvn4j" Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.630639 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.637393 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.645057 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.752007 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/636d191f-b7e2-4200-8dc3-5b0f386e2499-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"636d191f-b7e2-4200-8dc3-5b0f386e2499\") " pod="openstack/ovn-northd-0" Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.752716 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/636d191f-b7e2-4200-8dc3-5b0f386e2499-scripts\") pod \"ovn-northd-0\" (UID: \"636d191f-b7e2-4200-8dc3-5b0f386e2499\") " pod="openstack/ovn-northd-0" Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.754148 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/636d191f-b7e2-4200-8dc3-5b0f386e2499-config\") pod \"ovn-northd-0\" (UID: \"636d191f-b7e2-4200-8dc3-5b0f386e2499\") " pod="openstack/ovn-northd-0" Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.754550 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/636d191f-b7e2-4200-8dc3-5b0f386e2499-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"636d191f-b7e2-4200-8dc3-5b0f386e2499\") " pod="openstack/ovn-northd-0" Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.758021 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/636d191f-b7e2-4200-8dc3-5b0f386e2499-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"636d191f-b7e2-4200-8dc3-5b0f386e2499\") " pod="openstack/ovn-northd-0" Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.758441 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vnt9d\" (UniqueName: \"kubernetes.io/projected/636d191f-b7e2-4200-8dc3-5b0f386e2499-kube-api-access-vnt9d\") pod \"ovn-northd-0\" (UID: \"636d191f-b7e2-4200-8dc3-5b0f386e2499\") " pod="openstack/ovn-northd-0" Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.758610 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/636d191f-b7e2-4200-8dc3-5b0f386e2499-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"636d191f-b7e2-4200-8dc3-5b0f386e2499\") " pod="openstack/ovn-northd-0" Dec 03 09:10:11 crc kubenswrapper[4576]: 
I1203 09:10:11.802007 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-rl6dz"] Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.860744 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/636d191f-b7e2-4200-8dc3-5b0f386e2499-config\") pod \"ovn-northd-0\" (UID: \"636d191f-b7e2-4200-8dc3-5b0f386e2499\") " pod="openstack/ovn-northd-0" Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.860823 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/636d191f-b7e2-4200-8dc3-5b0f386e2499-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"636d191f-b7e2-4200-8dc3-5b0f386e2499\") " pod="openstack/ovn-northd-0" Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.860861 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/636d191f-b7e2-4200-8dc3-5b0f386e2499-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"636d191f-b7e2-4200-8dc3-5b0f386e2499\") " pod="openstack/ovn-northd-0" Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.860911 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vnt9d\" (UniqueName: \"kubernetes.io/projected/636d191f-b7e2-4200-8dc3-5b0f386e2499-kube-api-access-vnt9d\") pod \"ovn-northd-0\" (UID: \"636d191f-b7e2-4200-8dc3-5b0f386e2499\") " pod="openstack/ovn-northd-0" Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.860945 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/636d191f-b7e2-4200-8dc3-5b0f386e2499-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"636d191f-b7e2-4200-8dc3-5b0f386e2499\") " pod="openstack/ovn-northd-0" Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.860984 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/636d191f-b7e2-4200-8dc3-5b0f386e2499-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"636d191f-b7e2-4200-8dc3-5b0f386e2499\") " pod="openstack/ovn-northd-0" Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.861054 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/636d191f-b7e2-4200-8dc3-5b0f386e2499-scripts\") pod \"ovn-northd-0\" (UID: \"636d191f-b7e2-4200-8dc3-5b0f386e2499\") " pod="openstack/ovn-northd-0" Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.862589 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/636d191f-b7e2-4200-8dc3-5b0f386e2499-scripts\") pod \"ovn-northd-0\" (UID: \"636d191f-b7e2-4200-8dc3-5b0f386e2499\") " pod="openstack/ovn-northd-0" Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.863063 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/636d191f-b7e2-4200-8dc3-5b0f386e2499-config\") pod \"ovn-northd-0\" (UID: \"636d191f-b7e2-4200-8dc3-5b0f386e2499\") " pod="openstack/ovn-northd-0" Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.864024 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/636d191f-b7e2-4200-8dc3-5b0f386e2499-ovn-rundir\") pod 
\"ovn-northd-0\" (UID: \"636d191f-b7e2-4200-8dc3-5b0f386e2499\") " pod="openstack/ovn-northd-0" Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.867146 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/636d191f-b7e2-4200-8dc3-5b0f386e2499-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"636d191f-b7e2-4200-8dc3-5b0f386e2499\") " pod="openstack/ovn-northd-0" Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.867436 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/636d191f-b7e2-4200-8dc3-5b0f386e2499-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"636d191f-b7e2-4200-8dc3-5b0f386e2499\") " pod="openstack/ovn-northd-0" Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.868040 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/636d191f-b7e2-4200-8dc3-5b0f386e2499-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"636d191f-b7e2-4200-8dc3-5b0f386e2499\") " pod="openstack/ovn-northd-0" Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.888550 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vnt9d\" (UniqueName: \"kubernetes.io/projected/636d191f-b7e2-4200-8dc3-5b0f386e2499-kube-api-access-vnt9d\") pod \"ovn-northd-0\" (UID: \"636d191f-b7e2-4200-8dc3-5b0f386e2499\") " pod="openstack/ovn-northd-0" Dec 03 09:10:11 crc kubenswrapper[4576]: I1203 09:10:11.993973 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Dec 03 09:10:12 crc kubenswrapper[4576]: I1203 09:10:12.046984 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-q744m" event={"ID":"382f950f-abff-4951-a3d1-d1fdbae63e30","Type":"ContainerDied","Data":"7fbb43d7ccd2e9d609a194a1f4f262ea418cb904d1ed59b11ea9b9ffaba89bc3"} Dec 03 09:10:12 crc kubenswrapper[4576]: I1203 09:10:12.047001 4576 generic.go:334] "Generic (PLEG): container finished" podID="382f950f-abff-4951-a3d1-d1fdbae63e30" containerID="7fbb43d7ccd2e9d609a194a1f4f262ea418cb904d1ed59b11ea9b9ffaba89bc3" exitCode=0 Dec 03 09:10:12 crc kubenswrapper[4576]: I1203 09:10:12.051927 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-rl6dz" event={"ID":"64ba7793-75a1-4629-878c-9c39ed432ecc","Type":"ContainerStarted","Data":"7f28f168b85f1441a031dac79a0bfc9a4772ba77bbe5f29f9ef5b4740a69d2bd"} Dec 03 09:10:12 crc kubenswrapper[4576]: I1203 09:10:12.054008 4576 generic.go:334] "Generic (PLEG): container finished" podID="a2e06f57-3350-4eae-9f9b-d544fa698861" containerID="d83336951f8c8abb2cdf88c6f7dc3b906f0da3d48a7615633db70c9611ffde85" exitCode=0 Dec 03 09:10:12 crc kubenswrapper[4576]: I1203 09:10:12.054053 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-gjhtr" event={"ID":"a2e06f57-3350-4eae-9f9b-d544fa698861","Type":"ContainerDied","Data":"d83336951f8c8abb2cdf88c6f7dc3b906f0da3d48a7615633db70c9611ffde85"} Dec 03 09:10:12 crc kubenswrapper[4576]: I1203 09:10:12.054445 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Dec 03 09:10:12 crc kubenswrapper[4576]: I1203 09:10:12.054510 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Dec 03 09:10:12 crc kubenswrapper[4576]: I1203 
09:10:12.055679 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-lxvbj" event={"ID":"ac988c47-bfaa-4142-a15b-6c69acd494df","Type":"ContainerStarted","Data":"cba05ec5a793e8859a227e0e9f219ba12b0129c1b1f3144604b3a983ddcfab06"} Dec 03 09:10:12 crc kubenswrapper[4576]: I1203 09:10:12.056788 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-bj2tg" event={"ID":"9f56f17c-5be1-457f-84ea-56ba8f1c632c","Type":"ContainerStarted","Data":"48cdc2249689c2312b93bf5fbe6d94a9bc55305c4c22e65ff837f445e5c704d6"} Dec 03 09:10:12 crc kubenswrapper[4576]: I1203 09:10:12.463803 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 03 09:10:12 crc kubenswrapper[4576]: W1203 09:10:12.470464 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod636d191f_b7e2_4200_8dc3_5b0f386e2499.slice/crio-d2d7f472701c6f48f5de29eb18a51c9858f8af97d41d5d5a6a16b5731612470b WatchSource:0}: Error finding container d2d7f472701c6f48f5de29eb18a51c9858f8af97d41d5d5a6a16b5731612470b: Status 404 returned error can't find the container with id d2d7f472701c6f48f5de29eb18a51c9858f8af97d41d5d5a6a16b5731612470b Dec 03 09:10:13 crc kubenswrapper[4576]: I1203 09:10:13.019496 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-6p6d5"] Dec 03 09:10:13 crc kubenswrapper[4576]: I1203 09:10:13.021439 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6p6d5" Dec 03 09:10:13 crc kubenswrapper[4576]: I1203 09:10:13.028151 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6p6d5"] Dec 03 09:10:13 crc kubenswrapper[4576]: I1203 09:10:13.053430 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rdq74\" (UniqueName: \"kubernetes.io/projected/4710c074-d5a4-4fdf-9d10-e0f0934c2e44-kube-api-access-rdq74\") pod \"community-operators-6p6d5\" (UID: \"4710c074-d5a4-4fdf-9d10-e0f0934c2e44\") " pod="openshift-marketplace/community-operators-6p6d5" Dec 03 09:10:13 crc kubenswrapper[4576]: I1203 09:10:13.057777 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4710c074-d5a4-4fdf-9d10-e0f0934c2e44-utilities\") pod \"community-operators-6p6d5\" (UID: \"4710c074-d5a4-4fdf-9d10-e0f0934c2e44\") " pod="openshift-marketplace/community-operators-6p6d5" Dec 03 09:10:13 crc kubenswrapper[4576]: I1203 09:10:13.057855 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4710c074-d5a4-4fdf-9d10-e0f0934c2e44-catalog-content\") pod \"community-operators-6p6d5\" (UID: \"4710c074-d5a4-4fdf-9d10-e0f0934c2e44\") " pod="openshift-marketplace/community-operators-6p6d5" Dec 03 09:10:13 crc kubenswrapper[4576]: I1203 09:10:13.104319 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"636d191f-b7e2-4200-8dc3-5b0f386e2499","Type":"ContainerStarted","Data":"d2d7f472701c6f48f5de29eb18a51c9858f8af97d41d5d5a6a16b5731612470b"} Dec 03 09:10:13 crc kubenswrapper[4576]: I1203 09:10:13.159398 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdq74\" (UniqueName: 
\"kubernetes.io/projected/4710c074-d5a4-4fdf-9d10-e0f0934c2e44-kube-api-access-rdq74\") pod \"community-operators-6p6d5\" (UID: \"4710c074-d5a4-4fdf-9d10-e0f0934c2e44\") " pod="openshift-marketplace/community-operators-6p6d5" Dec 03 09:10:13 crc kubenswrapper[4576]: I1203 09:10:13.159511 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4710c074-d5a4-4fdf-9d10-e0f0934c2e44-utilities\") pod \"community-operators-6p6d5\" (UID: \"4710c074-d5a4-4fdf-9d10-e0f0934c2e44\") " pod="openshift-marketplace/community-operators-6p6d5" Dec 03 09:10:13 crc kubenswrapper[4576]: I1203 09:10:13.159577 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4710c074-d5a4-4fdf-9d10-e0f0934c2e44-catalog-content\") pod \"community-operators-6p6d5\" (UID: \"4710c074-d5a4-4fdf-9d10-e0f0934c2e44\") " pod="openshift-marketplace/community-operators-6p6d5" Dec 03 09:10:13 crc kubenswrapper[4576]: I1203 09:10:13.160045 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4710c074-d5a4-4fdf-9d10-e0f0934c2e44-catalog-content\") pod \"community-operators-6p6d5\" (UID: \"4710c074-d5a4-4fdf-9d10-e0f0934c2e44\") " pod="openshift-marketplace/community-operators-6p6d5" Dec 03 09:10:13 crc kubenswrapper[4576]: I1203 09:10:13.161284 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4710c074-d5a4-4fdf-9d10-e0f0934c2e44-utilities\") pod \"community-operators-6p6d5\" (UID: \"4710c074-d5a4-4fdf-9d10-e0f0934c2e44\") " pod="openshift-marketplace/community-operators-6p6d5" Dec 03 09:10:13 crc kubenswrapper[4576]: I1203 09:10:13.194550 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdq74\" (UniqueName: \"kubernetes.io/projected/4710c074-d5a4-4fdf-9d10-e0f0934c2e44-kube-api-access-rdq74\") pod \"community-operators-6p6d5\" (UID: \"4710c074-d5a4-4fdf-9d10-e0f0934c2e44\") " pod="openshift-marketplace/community-operators-6p6d5" Dec 03 09:10:13 crc kubenswrapper[4576]: I1203 09:10:13.195451 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Dec 03 09:10:13 crc kubenswrapper[4576]: I1203 09:10:13.195482 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Dec 03 09:10:13 crc kubenswrapper[4576]: I1203 09:10:13.357651 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6p6d5" Dec 03 09:10:14 crc kubenswrapper[4576]: I1203 09:10:14.084389 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6p6d5"] Dec 03 09:10:14 crc kubenswrapper[4576]: W1203 09:10:14.086501 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4710c074_d5a4_4fdf_9d10_e0f0934c2e44.slice/crio-04fb02eeb19349e6bc1539d306d3e9a5693a74650d26253172d47fad9860c357 WatchSource:0}: Error finding container 04fb02eeb19349e6bc1539d306d3e9a5693a74650d26253172d47fad9860c357: Status 404 returned error can't find the container with id 04fb02eeb19349e6bc1539d306d3e9a5693a74650d26253172d47fad9860c357 Dec 03 09:10:14 crc kubenswrapper[4576]: I1203 09:10:14.142210 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-lxvbj" event={"ID":"ac988c47-bfaa-4142-a15b-6c69acd494df","Type":"ContainerStarted","Data":"f9e4c6996b20fc79f35d388feaef2c8ceb9d3322e90115e99f6a2f1c2a7a3a67"} Dec 03 09:10:14 crc kubenswrapper[4576]: I1203 09:10:14.148218 4576 generic.go:334] "Generic (PLEG): container finished" podID="9f56f17c-5be1-457f-84ea-56ba8f1c632c" containerID="4feafa8312766f1d7128f3da04b325722870c7f1edcdd0aa9c7c1bd7896bced2" exitCode=0 Dec 03 09:10:14 crc kubenswrapper[4576]: I1203 09:10:14.148277 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-bj2tg" event={"ID":"9f56f17c-5be1-457f-84ea-56ba8f1c632c","Type":"ContainerDied","Data":"4feafa8312766f1d7128f3da04b325722870c7f1edcdd0aa9c7c1bd7896bced2"} Dec 03 09:10:14 crc kubenswrapper[4576]: I1203 09:10:14.157666 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-q744m" event={"ID":"382f950f-abff-4951-a3d1-d1fdbae63e30","Type":"ContainerDied","Data":"786ef965337b00927d6cc9ff8e9d8b1034c607ff772b20d3f5e2d77fe67d3357"} Dec 03 09:10:14 crc kubenswrapper[4576]: I1203 09:10:14.157736 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="786ef965337b00927d6cc9ff8e9d8b1034c607ff772b20d3f5e2d77fe67d3357" Dec 03 09:10:14 crc kubenswrapper[4576]: I1203 09:10:14.164609 4576 generic.go:334] "Generic (PLEG): container finished" podID="64ba7793-75a1-4629-878c-9c39ed432ecc" containerID="ebd354bd34ac54186ae1be68e1898bc185497f25a10cb3904091463245432245" exitCode=0 Dec 03 09:10:14 crc kubenswrapper[4576]: I1203 09:10:14.164692 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-rl6dz" event={"ID":"64ba7793-75a1-4629-878c-9c39ed432ecc","Type":"ContainerDied","Data":"ebd354bd34ac54186ae1be68e1898bc185497f25a10cb3904091463245432245"} Dec 03 09:10:14 crc kubenswrapper[4576]: I1203 09:10:14.186811 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-lxvbj" podStartSLOduration=4.1867792 podStartE2EDuration="4.1867792s" podCreationTimestamp="2025-12-03 09:10:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:10:14.178452422 +0000 UTC m=+1821.564429406" watchObservedRunningTime="2025-12-03 09:10:14.1867792 +0000 UTC m=+1821.572756184" Dec 03 09:10:14 crc kubenswrapper[4576]: I1203 09:10:14.190792 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6p6d5" 
event={"ID":"4710c074-d5a4-4fdf-9d10-e0f0934c2e44","Type":"ContainerStarted","Data":"04fb02eeb19349e6bc1539d306d3e9a5693a74650d26253172d47fad9860c357"} Dec 03 09:10:14 crc kubenswrapper[4576]: I1203 09:10:14.385562 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-q744m" Dec 03 09:10:14 crc kubenswrapper[4576]: I1203 09:10:14.407060 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-gjhtr" Dec 03 09:10:14 crc kubenswrapper[4576]: I1203 09:10:14.485145 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2e06f57-3350-4eae-9f9b-d544fa698861-config\") pod \"a2e06f57-3350-4eae-9f9b-d544fa698861\" (UID: \"a2e06f57-3350-4eae-9f9b-d544fa698861\") " Dec 03 09:10:14 crc kubenswrapper[4576]: I1203 09:10:14.485189 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-95fng\" (UniqueName: \"kubernetes.io/projected/a2e06f57-3350-4eae-9f9b-d544fa698861-kube-api-access-95fng\") pod \"a2e06f57-3350-4eae-9f9b-d544fa698861\" (UID: \"a2e06f57-3350-4eae-9f9b-d544fa698861\") " Dec 03 09:10:14 crc kubenswrapper[4576]: I1203 09:10:14.485277 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/382f950f-abff-4951-a3d1-d1fdbae63e30-config\") pod \"382f950f-abff-4951-a3d1-d1fdbae63e30\" (UID: \"382f950f-abff-4951-a3d1-d1fdbae63e30\") " Dec 03 09:10:14 crc kubenswrapper[4576]: I1203 09:10:14.485296 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q4vbg\" (UniqueName: \"kubernetes.io/projected/382f950f-abff-4951-a3d1-d1fdbae63e30-kube-api-access-q4vbg\") pod \"382f950f-abff-4951-a3d1-d1fdbae63e30\" (UID: \"382f950f-abff-4951-a3d1-d1fdbae63e30\") " Dec 03 09:10:14 crc kubenswrapper[4576]: I1203 09:10:14.485345 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a2e06f57-3350-4eae-9f9b-d544fa698861-dns-svc\") pod \"a2e06f57-3350-4eae-9f9b-d544fa698861\" (UID: \"a2e06f57-3350-4eae-9f9b-d544fa698861\") " Dec 03 09:10:14 crc kubenswrapper[4576]: I1203 09:10:14.485362 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/382f950f-abff-4951-a3d1-d1fdbae63e30-dns-svc\") pod \"382f950f-abff-4951-a3d1-d1fdbae63e30\" (UID: \"382f950f-abff-4951-a3d1-d1fdbae63e30\") " Dec 03 09:10:14 crc kubenswrapper[4576]: I1203 09:10:14.513960 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2e06f57-3350-4eae-9f9b-d544fa698861-kube-api-access-95fng" (OuterVolumeSpecName: "kube-api-access-95fng") pod "a2e06f57-3350-4eae-9f9b-d544fa698861" (UID: "a2e06f57-3350-4eae-9f9b-d544fa698861"). InnerVolumeSpecName "kube-api-access-95fng". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:10:14 crc kubenswrapper[4576]: I1203 09:10:14.514837 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/382f950f-abff-4951-a3d1-d1fdbae63e30-kube-api-access-q4vbg" (OuterVolumeSpecName: "kube-api-access-q4vbg") pod "382f950f-abff-4951-a3d1-d1fdbae63e30" (UID: "382f950f-abff-4951-a3d1-d1fdbae63e30"). InnerVolumeSpecName "kube-api-access-q4vbg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:10:14 crc kubenswrapper[4576]: I1203 09:10:14.590960 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q4vbg\" (UniqueName: \"kubernetes.io/projected/382f950f-abff-4951-a3d1-d1fdbae63e30-kube-api-access-q4vbg\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:14 crc kubenswrapper[4576]: I1203 09:10:14.590984 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-95fng\" (UniqueName: \"kubernetes.io/projected/a2e06f57-3350-4eae-9f9b-d544fa698861-kube-api-access-95fng\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:14 crc kubenswrapper[4576]: I1203 09:10:14.634099 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/382f950f-abff-4951-a3d1-d1fdbae63e30-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "382f950f-abff-4951-a3d1-d1fdbae63e30" (UID: "382f950f-abff-4951-a3d1-d1fdbae63e30"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:10:14 crc kubenswrapper[4576]: I1203 09:10:14.669333 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/382f950f-abff-4951-a3d1-d1fdbae63e30-config" (OuterVolumeSpecName: "config") pod "382f950f-abff-4951-a3d1-d1fdbae63e30" (UID: "382f950f-abff-4951-a3d1-d1fdbae63e30"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:10:14 crc kubenswrapper[4576]: I1203 09:10:14.692655 4576 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/382f950f-abff-4951-a3d1-d1fdbae63e30-config\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:14 crc kubenswrapper[4576]: I1203 09:10:14.692684 4576 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/382f950f-abff-4951-a3d1-d1fdbae63e30-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:14 crc kubenswrapper[4576]: I1203 09:10:14.743028 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a2e06f57-3350-4eae-9f9b-d544fa698861-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a2e06f57-3350-4eae-9f9b-d544fa698861" (UID: "a2e06f57-3350-4eae-9f9b-d544fa698861"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:10:14 crc kubenswrapper[4576]: I1203 09:10:14.780674 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a2e06f57-3350-4eae-9f9b-d544fa698861-config" (OuterVolumeSpecName: "config") pod "a2e06f57-3350-4eae-9f9b-d544fa698861" (UID: "a2e06f57-3350-4eae-9f9b-d544fa698861"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:10:14 crc kubenswrapper[4576]: I1203 09:10:14.798302 4576 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a2e06f57-3350-4eae-9f9b-d544fa698861-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:14 crc kubenswrapper[4576]: I1203 09:10:14.798362 4576 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2e06f57-3350-4eae-9f9b-d544fa698861-config\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.202788 4576 generic.go:334] "Generic (PLEG): container finished" podID="4710c074-d5a4-4fdf-9d10-e0f0934c2e44" containerID="6236ce1d8bd4be8d1709db3ce9ae473e4211c8970d1f76e2119ce41cc688aee4" exitCode=0 Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.202933 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6p6d5" event={"ID":"4710c074-d5a4-4fdf-9d10-e0f0934c2e44","Type":"ContainerDied","Data":"6236ce1d8bd4be8d1709db3ce9ae473e4211c8970d1f76e2119ce41cc688aee4"} Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.214046 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-bj2tg" event={"ID":"9f56f17c-5be1-457f-84ea-56ba8f1c632c","Type":"ContainerStarted","Data":"55c83df8a1b34e279d802a31aa68b9d18f0fb8b44b9cdef524beedb1303a8a59"} Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.214216 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5bf47b49b7-bj2tg" Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.216999 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-rl6dz" event={"ID":"64ba7793-75a1-4629-878c-9c39ed432ecc","Type":"ContainerStarted","Data":"7205b64365f2278c35b093827d946ffa68df66e83aa5c726909c5ff203f4bc5c"} Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.217142 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8554648995-rl6dz" Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.225501 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-gjhtr" Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.226597 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-gjhtr" event={"ID":"a2e06f57-3350-4eae-9f9b-d544fa698861","Type":"ContainerDied","Data":"69c0d42daf62e0d9456f5b1adadf8bb77d6b5a6531fcaf855eae37f0541cb3fd"} Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.226692 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-q744m" Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.226693 4576 scope.go:117] "RemoveContainer" containerID="d83336951f8c8abb2cdf88c6f7dc3b906f0da3d48a7615633db70c9611ffde85" Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.268479 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5bf47b49b7-bj2tg" podStartSLOduration=5.26845927 podStartE2EDuration="5.26845927s" podCreationTimestamp="2025-12-03 09:10:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:10:15.265904959 +0000 UTC m=+1822.651881943" watchObservedRunningTime="2025-12-03 09:10:15.26845927 +0000 UTC m=+1822.654436254" Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.296742 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8554648995-rl6dz" podStartSLOduration=5.296705983 podStartE2EDuration="5.296705983s" podCreationTimestamp="2025-12-03 09:10:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:10:15.294881663 +0000 UTC m=+1822.680858647" watchObservedRunningTime="2025-12-03 09:10:15.296705983 +0000 UTC m=+1822.682682977" Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.318670 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-q744m"] Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.326982 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-q744m"] Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.344726 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-gjhtr"] Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.354463 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-gjhtr"] Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.595159 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.606609 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-bj2tg"] Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.675063 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-dfdr5"] Dec 03 09:10:15 crc kubenswrapper[4576]: E1203 09:10:15.675412 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2e06f57-3350-4eae-9f9b-d544fa698861" containerName="init" Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.675433 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2e06f57-3350-4eae-9f9b-d544fa698861" containerName="init" Dec 03 09:10:15 crc kubenswrapper[4576]: E1203 09:10:15.675460 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="382f950f-abff-4951-a3d1-d1fdbae63e30" containerName="init" Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.675466 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="382f950f-abff-4951-a3d1-d1fdbae63e30" containerName="init" Dec 03 09:10:15 crc kubenswrapper[4576]: E1203 09:10:15.675478 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="382f950f-abff-4951-a3d1-d1fdbae63e30" containerName="dnsmasq-dns" Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.675484 
4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="382f950f-abff-4951-a3d1-d1fdbae63e30" containerName="dnsmasq-dns" Dec 03 09:10:15 crc kubenswrapper[4576]: E1203 09:10:15.675501 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2e06f57-3350-4eae-9f9b-d544fa698861" containerName="dnsmasq-dns" Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.675507 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2e06f57-3350-4eae-9f9b-d544fa698861" containerName="dnsmasq-dns" Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.675678 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="382f950f-abff-4951-a3d1-d1fdbae63e30" containerName="dnsmasq-dns" Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.675695 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2e06f57-3350-4eae-9f9b-d544fa698861" containerName="dnsmasq-dns" Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.677351 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-dfdr5" Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.685665 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="382f950f-abff-4951-a3d1-d1fdbae63e30" path="/var/lib/kubelet/pods/382f950f-abff-4951-a3d1-d1fdbae63e30/volumes" Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.686250 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2e06f57-3350-4eae-9f9b-d544fa698861" path="/var/lib/kubelet/pods/a2e06f57-3350-4eae-9f9b-d544fa698861/volumes" Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.729335 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-dfdr5"] Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.838873 4576 scope.go:117] "RemoveContainer" containerID="27f260bc6d0a51714d0e208762d991a510ae8bb3464731e36c3c801d3f6ed833" Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.852558 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9d0cf9db-537a-41b4-bb17-d5f1a42f84e7-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-dfdr5\" (UID: \"9d0cf9db-537a-41b4-bb17-d5f1a42f84e7\") " pod="openstack/dnsmasq-dns-b8fbc5445-dfdr5" Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.852596 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-67xjq\" (UniqueName: \"kubernetes.io/projected/9d0cf9db-537a-41b4-bb17-d5f1a42f84e7-kube-api-access-67xjq\") pod \"dnsmasq-dns-b8fbc5445-dfdr5\" (UID: \"9d0cf9db-537a-41b4-bb17-d5f1a42f84e7\") " pod="openstack/dnsmasq-dns-b8fbc5445-dfdr5" Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.852654 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9d0cf9db-537a-41b4-bb17-d5f1a42f84e7-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-dfdr5\" (UID: \"9d0cf9db-537a-41b4-bb17-d5f1a42f84e7\") " pod="openstack/dnsmasq-dns-b8fbc5445-dfdr5" Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.852736 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9d0cf9db-537a-41b4-bb17-d5f1a42f84e7-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-dfdr5\" (UID: \"9d0cf9db-537a-41b4-bb17-d5f1a42f84e7\") " 
pod="openstack/dnsmasq-dns-b8fbc5445-dfdr5" Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.852783 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d0cf9db-537a-41b4-bb17-d5f1a42f84e7-config\") pod \"dnsmasq-dns-b8fbc5445-dfdr5\" (UID: \"9d0cf9db-537a-41b4-bb17-d5f1a42f84e7\") " pod="openstack/dnsmasq-dns-b8fbc5445-dfdr5" Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.955773 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9d0cf9db-537a-41b4-bb17-d5f1a42f84e7-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-dfdr5\" (UID: \"9d0cf9db-537a-41b4-bb17-d5f1a42f84e7\") " pod="openstack/dnsmasq-dns-b8fbc5445-dfdr5" Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.955829 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-67xjq\" (UniqueName: \"kubernetes.io/projected/9d0cf9db-537a-41b4-bb17-d5f1a42f84e7-kube-api-access-67xjq\") pod \"dnsmasq-dns-b8fbc5445-dfdr5\" (UID: \"9d0cf9db-537a-41b4-bb17-d5f1a42f84e7\") " pod="openstack/dnsmasq-dns-b8fbc5445-dfdr5" Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.955900 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9d0cf9db-537a-41b4-bb17-d5f1a42f84e7-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-dfdr5\" (UID: \"9d0cf9db-537a-41b4-bb17-d5f1a42f84e7\") " pod="openstack/dnsmasq-dns-b8fbc5445-dfdr5" Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.955963 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9d0cf9db-537a-41b4-bb17-d5f1a42f84e7-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-dfdr5\" (UID: \"9d0cf9db-537a-41b4-bb17-d5f1a42f84e7\") " pod="openstack/dnsmasq-dns-b8fbc5445-dfdr5" Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.956005 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d0cf9db-537a-41b4-bb17-d5f1a42f84e7-config\") pod \"dnsmasq-dns-b8fbc5445-dfdr5\" (UID: \"9d0cf9db-537a-41b4-bb17-d5f1a42f84e7\") " pod="openstack/dnsmasq-dns-b8fbc5445-dfdr5" Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.956956 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d0cf9db-537a-41b4-bb17-d5f1a42f84e7-config\") pod \"dnsmasq-dns-b8fbc5445-dfdr5\" (UID: \"9d0cf9db-537a-41b4-bb17-d5f1a42f84e7\") " pod="openstack/dnsmasq-dns-b8fbc5445-dfdr5" Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.957832 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9d0cf9db-537a-41b4-bb17-d5f1a42f84e7-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-dfdr5\" (UID: \"9d0cf9db-537a-41b4-bb17-d5f1a42f84e7\") " pod="openstack/dnsmasq-dns-b8fbc5445-dfdr5" Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.958412 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9d0cf9db-537a-41b4-bb17-d5f1a42f84e7-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-dfdr5\" (UID: \"9d0cf9db-537a-41b4-bb17-d5f1a42f84e7\") " pod="openstack/dnsmasq-dns-b8fbc5445-dfdr5" Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.961853 4576 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9d0cf9db-537a-41b4-bb17-d5f1a42f84e7-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-dfdr5\" (UID: \"9d0cf9db-537a-41b4-bb17-d5f1a42f84e7\") " pod="openstack/dnsmasq-dns-b8fbc5445-dfdr5" Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.993285 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-b78nr"] Dec 03 09:10:15 crc kubenswrapper[4576]: I1203 09:10:15.995131 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b78nr" Dec 03 09:10:16 crc kubenswrapper[4576]: I1203 09:10:16.075096 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15f066ad-3c0d-409b-9c47-e9a36cf6660d-catalog-content\") pod \"redhat-operators-b78nr\" (UID: \"15f066ad-3c0d-409b-9c47-e9a36cf6660d\") " pod="openshift-marketplace/redhat-operators-b78nr" Dec 03 09:10:16 crc kubenswrapper[4576]: I1203 09:10:16.075250 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h6jbv\" (UniqueName: \"kubernetes.io/projected/15f066ad-3c0d-409b-9c47-e9a36cf6660d-kube-api-access-h6jbv\") pod \"redhat-operators-b78nr\" (UID: \"15f066ad-3c0d-409b-9c47-e9a36cf6660d\") " pod="openshift-marketplace/redhat-operators-b78nr" Dec 03 09:10:16 crc kubenswrapper[4576]: I1203 09:10:16.075275 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15f066ad-3c0d-409b-9c47-e9a36cf6660d-utilities\") pod \"redhat-operators-b78nr\" (UID: \"15f066ad-3c0d-409b-9c47-e9a36cf6660d\") " pod="openshift-marketplace/redhat-operators-b78nr" Dec 03 09:10:16 crc kubenswrapper[4576]: I1203 09:10:16.127189 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-b78nr"] Dec 03 09:10:16 crc kubenswrapper[4576]: I1203 09:10:16.193689 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h6jbv\" (UniqueName: \"kubernetes.io/projected/15f066ad-3c0d-409b-9c47-e9a36cf6660d-kube-api-access-h6jbv\") pod \"redhat-operators-b78nr\" (UID: \"15f066ad-3c0d-409b-9c47-e9a36cf6660d\") " pod="openshift-marketplace/redhat-operators-b78nr" Dec 03 09:10:16 crc kubenswrapper[4576]: I1203 09:10:16.193793 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15f066ad-3c0d-409b-9c47-e9a36cf6660d-utilities\") pod \"redhat-operators-b78nr\" (UID: \"15f066ad-3c0d-409b-9c47-e9a36cf6660d\") " pod="openshift-marketplace/redhat-operators-b78nr" Dec 03 09:10:16 crc kubenswrapper[4576]: I1203 09:10:16.194209 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15f066ad-3c0d-409b-9c47-e9a36cf6660d-catalog-content\") pod \"redhat-operators-b78nr\" (UID: \"15f066ad-3c0d-409b-9c47-e9a36cf6660d\") " pod="openshift-marketplace/redhat-operators-b78nr" Dec 03 09:10:16 crc kubenswrapper[4576]: I1203 09:10:16.195313 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15f066ad-3c0d-409b-9c47-e9a36cf6660d-catalog-content\") pod \"redhat-operators-b78nr\" (UID: 
\"15f066ad-3c0d-409b-9c47-e9a36cf6660d\") " pod="openshift-marketplace/redhat-operators-b78nr" Dec 03 09:10:16 crc kubenswrapper[4576]: I1203 09:10:16.195414 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15f066ad-3c0d-409b-9c47-e9a36cf6660d-utilities\") pod \"redhat-operators-b78nr\" (UID: \"15f066ad-3c0d-409b-9c47-e9a36cf6660d\") " pod="openshift-marketplace/redhat-operators-b78nr" Dec 03 09:10:16 crc kubenswrapper[4576]: I1203 09:10:16.243276 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h6jbv\" (UniqueName: \"kubernetes.io/projected/15f066ad-3c0d-409b-9c47-e9a36cf6660d-kube-api-access-h6jbv\") pod \"redhat-operators-b78nr\" (UID: \"15f066ad-3c0d-409b-9c47-e9a36cf6660d\") " pod="openshift-marketplace/redhat-operators-b78nr" Dec 03 09:10:16 crc kubenswrapper[4576]: I1203 09:10:16.254374 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-67xjq\" (UniqueName: \"kubernetes.io/projected/9d0cf9db-537a-41b4-bb17-d5f1a42f84e7-kube-api-access-67xjq\") pod \"dnsmasq-dns-b8fbc5445-dfdr5\" (UID: \"9d0cf9db-537a-41b4-bb17-d5f1a42f84e7\") " pod="openstack/dnsmasq-dns-b8fbc5445-dfdr5" Dec 03 09:10:16 crc kubenswrapper[4576]: I1203 09:10:16.296341 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-dfdr5" Dec 03 09:10:16 crc kubenswrapper[4576]: I1203 09:10:16.609358 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b78nr" Dec 03 09:10:16 crc kubenswrapper[4576]: I1203 09:10:16.813783 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Dec 03 09:10:16 crc kubenswrapper[4576]: I1203 09:10:16.837053 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Dec 03 09:10:16 crc kubenswrapper[4576]: I1203 09:10:16.857080 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Dec 03 09:10:16 crc kubenswrapper[4576]: I1203 09:10:16.857313 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Dec 03 09:10:16 crc kubenswrapper[4576]: I1203 09:10:16.857445 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Dec 03 09:10:16 crc kubenswrapper[4576]: I1203 09:10:16.867759 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-p9qg2" Dec 03 09:10:16 crc kubenswrapper[4576]: I1203 09:10:16.911298 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.021331 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/8e0694dc-a49e-4136-a206-3bb5c8acd48c-lock\") pod \"swift-storage-0\" (UID: \"8e0694dc-a49e-4136-a206-3bb5c8acd48c\") " pod="openstack/swift-storage-0" Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.021373 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"8e0694dc-a49e-4136-a206-3bb5c8acd48c\") " pod="openstack/swift-storage-0" Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.021511 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8e0694dc-a49e-4136-a206-3bb5c8acd48c-etc-swift\") pod \"swift-storage-0\" (UID: \"8e0694dc-a49e-4136-a206-3bb5c8acd48c\") " pod="openstack/swift-storage-0" Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.021560 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rhbrr\" (UniqueName: \"kubernetes.io/projected/8e0694dc-a49e-4136-a206-3bb5c8acd48c-kube-api-access-rhbrr\") pod \"swift-storage-0\" (UID: \"8e0694dc-a49e-4136-a206-3bb5c8acd48c\") " pod="openstack/swift-storage-0" Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.021584 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/8e0694dc-a49e-4136-a206-3bb5c8acd48c-cache\") pod \"swift-storage-0\" (UID: \"8e0694dc-a49e-4136-a206-3bb5c8acd48c\") " pod="openstack/swift-storage-0" Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.122855 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8e0694dc-a49e-4136-a206-3bb5c8acd48c-etc-swift\") pod \"swift-storage-0\" (UID: \"8e0694dc-a49e-4136-a206-3bb5c8acd48c\") " pod="openstack/swift-storage-0" Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.123223 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rhbrr\" (UniqueName: \"kubernetes.io/projected/8e0694dc-a49e-4136-a206-3bb5c8acd48c-kube-api-access-rhbrr\") pod \"swift-storage-0\" (UID: \"8e0694dc-a49e-4136-a206-3bb5c8acd48c\") " pod="openstack/swift-storage-0" Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.123262 4576 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/8e0694dc-a49e-4136-a206-3bb5c8acd48c-cache\") pod \"swift-storage-0\" (UID: \"8e0694dc-a49e-4136-a206-3bb5c8acd48c\") " pod="openstack/swift-storage-0" Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.123305 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/8e0694dc-a49e-4136-a206-3bb5c8acd48c-lock\") pod \"swift-storage-0\" (UID: \"8e0694dc-a49e-4136-a206-3bb5c8acd48c\") " pod="openstack/swift-storage-0" Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.123333 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"8e0694dc-a49e-4136-a206-3bb5c8acd48c\") " pod="openstack/swift-storage-0" Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.123900 4576 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"8e0694dc-a49e-4136-a206-3bb5c8acd48c\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/swift-storage-0" Dec 03 09:10:17 crc kubenswrapper[4576]: E1203 09:10:17.124384 4576 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 03 09:10:17 crc kubenswrapper[4576]: E1203 09:10:17.124420 4576 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 03 09:10:17 crc kubenswrapper[4576]: E1203 09:10:17.124483 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8e0694dc-a49e-4136-a206-3bb5c8acd48c-etc-swift podName:8e0694dc-a49e-4136-a206-3bb5c8acd48c nodeName:}" failed. No retries permitted until 2025-12-03 09:10:17.624457468 +0000 UTC m=+1825.010434462 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/8e0694dc-a49e-4136-a206-3bb5c8acd48c-etc-swift") pod "swift-storage-0" (UID: "8e0694dc-a49e-4136-a206-3bb5c8acd48c") : configmap "swift-ring-files" not found Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.124793 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/8e0694dc-a49e-4136-a206-3bb5c8acd48c-cache\") pod \"swift-storage-0\" (UID: \"8e0694dc-a49e-4136-a206-3bb5c8acd48c\") " pod="openstack/swift-storage-0" Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.127621 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/8e0694dc-a49e-4136-a206-3bb5c8acd48c-lock\") pod \"swift-storage-0\" (UID: \"8e0694dc-a49e-4136-a206-3bb5c8acd48c\") " pod="openstack/swift-storage-0" Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.222809 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-649tl"] Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.228284 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-649tl" Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.254272 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.254554 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.254710 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.255998 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rhbrr\" (UniqueName: \"kubernetes.io/projected/8e0694dc-a49e-4136-a206-3bb5c8acd48c-kube-api-access-rhbrr\") pod \"swift-storage-0\" (UID: \"8e0694dc-a49e-4136-a206-3bb5c8acd48c\") " pod="openstack/swift-storage-0" Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.258662 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-dfdr5"] Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.269486 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"8e0694dc-a49e-4136-a206-3bb5c8acd48c\") " pod="openstack/swift-storage-0" Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.286900 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-649tl"] Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.297655 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5bf47b49b7-bj2tg" podUID="9f56f17c-5be1-457f-84ea-56ba8f1c632c" containerName="dnsmasq-dns" containerID="cri-o://55c83df8a1b34e279d802a31aa68b9d18f0fb8b44b9cdef524beedb1303a8a59" gracePeriod=10 Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.297751 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"636d191f-b7e2-4200-8dc3-5b0f386e2499","Type":"ContainerStarted","Data":"68a514c0f3b7f57339b37e19c587f7cfa764000b708952aa17d3346bd84f23b7"} Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.297781 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"636d191f-b7e2-4200-8dc3-5b0f386e2499","Type":"ContainerStarted","Data":"c4d55cc33f217f490c451ff4beee7abe68fe037ba28e67c2de436d65d381a309"} Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.364863 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/801bf024-9418-42e1-893f-0a4b82d411b4-swiftconf\") pod \"swift-ring-rebalance-649tl\" (UID: \"801bf024-9418-42e1-893f-0a4b82d411b4\") " pod="openstack/swift-ring-rebalance-649tl" Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.364920 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/801bf024-9418-42e1-893f-0a4b82d411b4-combined-ca-bundle\") pod \"swift-ring-rebalance-649tl\" (UID: \"801bf024-9418-42e1-893f-0a4b82d411b4\") " pod="openstack/swift-ring-rebalance-649tl" Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.364955 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: 
\"kubernetes.io/empty-dir/801bf024-9418-42e1-893f-0a4b82d411b4-etc-swift\") pod \"swift-ring-rebalance-649tl\" (UID: \"801bf024-9418-42e1-893f-0a4b82d411b4\") " pod="openstack/swift-ring-rebalance-649tl" Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.365055 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/801bf024-9418-42e1-893f-0a4b82d411b4-dispersionconf\") pod \"swift-ring-rebalance-649tl\" (UID: \"801bf024-9418-42e1-893f-0a4b82d411b4\") " pod="openstack/swift-ring-rebalance-649tl" Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.365092 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6zvxv\" (UniqueName: \"kubernetes.io/projected/801bf024-9418-42e1-893f-0a4b82d411b4-kube-api-access-6zvxv\") pod \"swift-ring-rebalance-649tl\" (UID: \"801bf024-9418-42e1-893f-0a4b82d411b4\") " pod="openstack/swift-ring-rebalance-649tl" Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.365308 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/801bf024-9418-42e1-893f-0a4b82d411b4-scripts\") pod \"swift-ring-rebalance-649tl\" (UID: \"801bf024-9418-42e1-893f-0a4b82d411b4\") " pod="openstack/swift-ring-rebalance-649tl" Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.365349 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/801bf024-9418-42e1-893f-0a4b82d411b4-ring-data-devices\") pod \"swift-ring-rebalance-649tl\" (UID: \"801bf024-9418-42e1-893f-0a4b82d411b4\") " pod="openstack/swift-ring-rebalance-649tl" Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.384601 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-b78nr"] Dec 03 09:10:17 crc kubenswrapper[4576]: W1203 09:10:17.409699 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod15f066ad_3c0d_409b_9c47_e9a36cf6660d.slice/crio-8ff7419530f86e7b6b880465a877e14bee3a36a27ba4ec6529c862ca0955bdc0 WatchSource:0}: Error finding container 8ff7419530f86e7b6b880465a877e14bee3a36a27ba4ec6529c862ca0955bdc0: Status 404 returned error can't find the container with id 8ff7419530f86e7b6b880465a877e14bee3a36a27ba4ec6529c862ca0955bdc0 Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.466582 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/801bf024-9418-42e1-893f-0a4b82d411b4-combined-ca-bundle\") pod \"swift-ring-rebalance-649tl\" (UID: \"801bf024-9418-42e1-893f-0a4b82d411b4\") " pod="openstack/swift-ring-rebalance-649tl" Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.466634 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/801bf024-9418-42e1-893f-0a4b82d411b4-etc-swift\") pod \"swift-ring-rebalance-649tl\" (UID: \"801bf024-9418-42e1-893f-0a4b82d411b4\") " pod="openstack/swift-ring-rebalance-649tl" Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.466696 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/801bf024-9418-42e1-893f-0a4b82d411b4-dispersionconf\") pod 
\"swift-ring-rebalance-649tl\" (UID: \"801bf024-9418-42e1-893f-0a4b82d411b4\") " pod="openstack/swift-ring-rebalance-649tl" Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.466719 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6zvxv\" (UniqueName: \"kubernetes.io/projected/801bf024-9418-42e1-893f-0a4b82d411b4-kube-api-access-6zvxv\") pod \"swift-ring-rebalance-649tl\" (UID: \"801bf024-9418-42e1-893f-0a4b82d411b4\") " pod="openstack/swift-ring-rebalance-649tl" Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.466842 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/801bf024-9418-42e1-893f-0a4b82d411b4-scripts\") pod \"swift-ring-rebalance-649tl\" (UID: \"801bf024-9418-42e1-893f-0a4b82d411b4\") " pod="openstack/swift-ring-rebalance-649tl" Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.466870 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/801bf024-9418-42e1-893f-0a4b82d411b4-ring-data-devices\") pod \"swift-ring-rebalance-649tl\" (UID: \"801bf024-9418-42e1-893f-0a4b82d411b4\") " pod="openstack/swift-ring-rebalance-649tl" Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.466925 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/801bf024-9418-42e1-893f-0a4b82d411b4-swiftconf\") pod \"swift-ring-rebalance-649tl\" (UID: \"801bf024-9418-42e1-893f-0a4b82d411b4\") " pod="openstack/swift-ring-rebalance-649tl" Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.468766 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/801bf024-9418-42e1-893f-0a4b82d411b4-scripts\") pod \"swift-ring-rebalance-649tl\" (UID: \"801bf024-9418-42e1-893f-0a4b82d411b4\") " pod="openstack/swift-ring-rebalance-649tl" Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.468981 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/801bf024-9418-42e1-893f-0a4b82d411b4-etc-swift\") pod \"swift-ring-rebalance-649tl\" (UID: \"801bf024-9418-42e1-893f-0a4b82d411b4\") " pod="openstack/swift-ring-rebalance-649tl" Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.469031 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/801bf024-9418-42e1-893f-0a4b82d411b4-ring-data-devices\") pod \"swift-ring-rebalance-649tl\" (UID: \"801bf024-9418-42e1-893f-0a4b82d411b4\") " pod="openstack/swift-ring-rebalance-649tl" Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.475619 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/801bf024-9418-42e1-893f-0a4b82d411b4-swiftconf\") pod \"swift-ring-rebalance-649tl\" (UID: \"801bf024-9418-42e1-893f-0a4b82d411b4\") " pod="openstack/swift-ring-rebalance-649tl" Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.477062 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/801bf024-9418-42e1-893f-0a4b82d411b4-combined-ca-bundle\") pod \"swift-ring-rebalance-649tl\" (UID: \"801bf024-9418-42e1-893f-0a4b82d411b4\") " pod="openstack/swift-ring-rebalance-649tl" Dec 03 09:10:17 crc 
kubenswrapper[4576]: I1203 09:10:17.493941 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/801bf024-9418-42e1-893f-0a4b82d411b4-dispersionconf\") pod \"swift-ring-rebalance-649tl\" (UID: \"801bf024-9418-42e1-893f-0a4b82d411b4\") " pod="openstack/swift-ring-rebalance-649tl" Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.495965 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6zvxv\" (UniqueName: \"kubernetes.io/projected/801bf024-9418-42e1-893f-0a4b82d411b4-kube-api-access-6zvxv\") pod \"swift-ring-rebalance-649tl\" (UID: \"801bf024-9418-42e1-893f-0a4b82d411b4\") " pod="openstack/swift-ring-rebalance-649tl" Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.673979 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-649tl" Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.674707 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8e0694dc-a49e-4136-a206-3bb5c8acd48c-etc-swift\") pod \"swift-storage-0\" (UID: \"8e0694dc-a49e-4136-a206-3bb5c8acd48c\") " pod="openstack/swift-storage-0" Dec 03 09:10:17 crc kubenswrapper[4576]: E1203 09:10:17.675006 4576 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 03 09:10:17 crc kubenswrapper[4576]: E1203 09:10:17.675023 4576 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 03 09:10:17 crc kubenswrapper[4576]: E1203 09:10:17.675075 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8e0694dc-a49e-4136-a206-3bb5c8acd48c-etc-swift podName:8e0694dc-a49e-4136-a206-3bb5c8acd48c nodeName:}" failed. No retries permitted until 2025-12-03 09:10:18.675058401 +0000 UTC m=+1826.061035375 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/8e0694dc-a49e-4136-a206-3bb5c8acd48c-etc-swift") pod "swift-storage-0" (UID: "8e0694dc-a49e-4136-a206-3bb5c8acd48c") : configmap "swift-ring-files" not found Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.844861 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-bj2tg" Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.980235 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f56f17c-5be1-457f-84ea-56ba8f1c632c-config\") pod \"9f56f17c-5be1-457f-84ea-56ba8f1c632c\" (UID: \"9f56f17c-5be1-457f-84ea-56ba8f1c632c\") " Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.980368 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7cvd6\" (UniqueName: \"kubernetes.io/projected/9f56f17c-5be1-457f-84ea-56ba8f1c632c-kube-api-access-7cvd6\") pod \"9f56f17c-5be1-457f-84ea-56ba8f1c632c\" (UID: \"9f56f17c-5be1-457f-84ea-56ba8f1c632c\") " Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.980422 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f56f17c-5be1-457f-84ea-56ba8f1c632c-dns-svc\") pod \"9f56f17c-5be1-457f-84ea-56ba8f1c632c\" (UID: \"9f56f17c-5be1-457f-84ea-56ba8f1c632c\") " Dec 03 09:10:17 crc kubenswrapper[4576]: I1203 09:10:17.980471 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9f56f17c-5be1-457f-84ea-56ba8f1c632c-ovsdbserver-nb\") pod \"9f56f17c-5be1-457f-84ea-56ba8f1c632c\" (UID: \"9f56f17c-5be1-457f-84ea-56ba8f1c632c\") " Dec 03 09:10:18 crc kubenswrapper[4576]: I1203 09:10:18.003748 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f56f17c-5be1-457f-84ea-56ba8f1c632c-kube-api-access-7cvd6" (OuterVolumeSpecName: "kube-api-access-7cvd6") pod "9f56f17c-5be1-457f-84ea-56ba8f1c632c" (UID: "9f56f17c-5be1-457f-84ea-56ba8f1c632c"). InnerVolumeSpecName "kube-api-access-7cvd6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:10:18 crc kubenswrapper[4576]: I1203 09:10:18.050130 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f56f17c-5be1-457f-84ea-56ba8f1c632c-config" (OuterVolumeSpecName: "config") pod "9f56f17c-5be1-457f-84ea-56ba8f1c632c" (UID: "9f56f17c-5be1-457f-84ea-56ba8f1c632c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:10:18 crc kubenswrapper[4576]: I1203 09:10:18.062662 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f56f17c-5be1-457f-84ea-56ba8f1c632c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9f56f17c-5be1-457f-84ea-56ba8f1c632c" (UID: "9f56f17c-5be1-457f-84ea-56ba8f1c632c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:10:18 crc kubenswrapper[4576]: I1203 09:10:18.072888 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f56f17c-5be1-457f-84ea-56ba8f1c632c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9f56f17c-5be1-457f-84ea-56ba8f1c632c" (UID: "9f56f17c-5be1-457f-84ea-56ba8f1c632c"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:10:18 crc kubenswrapper[4576]: I1203 09:10:18.082862 4576 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9f56f17c-5be1-457f-84ea-56ba8f1c632c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:18 crc kubenswrapper[4576]: I1203 09:10:18.083156 4576 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f56f17c-5be1-457f-84ea-56ba8f1c632c-config\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:18 crc kubenswrapper[4576]: I1203 09:10:18.083170 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7cvd6\" (UniqueName: \"kubernetes.io/projected/9f56f17c-5be1-457f-84ea-56ba8f1c632c-kube-api-access-7cvd6\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:18 crc kubenswrapper[4576]: I1203 09:10:18.083183 4576 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f56f17c-5be1-457f-84ea-56ba8f1c632c-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:18 crc kubenswrapper[4576]: I1203 09:10:18.317163 4576 generic.go:334] "Generic (PLEG): container finished" podID="9f56f17c-5be1-457f-84ea-56ba8f1c632c" containerID="55c83df8a1b34e279d802a31aa68b9d18f0fb8b44b9cdef524beedb1303a8a59" exitCode=0 Dec 03 09:10:18 crc kubenswrapper[4576]: I1203 09:10:18.317237 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-bj2tg" Dec 03 09:10:18 crc kubenswrapper[4576]: I1203 09:10:18.317251 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-bj2tg" event={"ID":"9f56f17c-5be1-457f-84ea-56ba8f1c632c","Type":"ContainerDied","Data":"55c83df8a1b34e279d802a31aa68b9d18f0fb8b44b9cdef524beedb1303a8a59"} Dec 03 09:10:18 crc kubenswrapper[4576]: I1203 09:10:18.317292 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-bj2tg" event={"ID":"9f56f17c-5be1-457f-84ea-56ba8f1c632c","Type":"ContainerDied","Data":"48cdc2249689c2312b93bf5fbe6d94a9bc55305c4c22e65ff837f445e5c704d6"} Dec 03 09:10:18 crc kubenswrapper[4576]: I1203 09:10:18.317313 4576 scope.go:117] "RemoveContainer" containerID="55c83df8a1b34e279d802a31aa68b9d18f0fb8b44b9cdef524beedb1303a8a59" Dec 03 09:10:18 crc kubenswrapper[4576]: I1203 09:10:18.324178 4576 generic.go:334] "Generic (PLEG): container finished" podID="4710c074-d5a4-4fdf-9d10-e0f0934c2e44" containerID="12df4d239475c86c3a581951d92d16df38830cca374d0332c6fc471013a5b2e4" exitCode=0 Dec 03 09:10:18 crc kubenswrapper[4576]: I1203 09:10:18.324255 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6p6d5" event={"ID":"4710c074-d5a4-4fdf-9d10-e0f0934c2e44","Type":"ContainerDied","Data":"12df4d239475c86c3a581951d92d16df38830cca374d0332c6fc471013a5b2e4"} Dec 03 09:10:18 crc kubenswrapper[4576]: I1203 09:10:18.329389 4576 generic.go:334] "Generic (PLEG): container finished" podID="9d0cf9db-537a-41b4-bb17-d5f1a42f84e7" containerID="4a74b884b9e59fc885fc851ef6fe5473aaeaad653e43f2f026e624f29daeab06" exitCode=0 Dec 03 09:10:18 crc kubenswrapper[4576]: I1203 09:10:18.329463 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-dfdr5" event={"ID":"9d0cf9db-537a-41b4-bb17-d5f1a42f84e7","Type":"ContainerDied","Data":"4a74b884b9e59fc885fc851ef6fe5473aaeaad653e43f2f026e624f29daeab06"} Dec 03 09:10:18 crc kubenswrapper[4576]: I1203 
09:10:18.329500 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-dfdr5" event={"ID":"9d0cf9db-537a-41b4-bb17-d5f1a42f84e7","Type":"ContainerStarted","Data":"5b7fa9c2d18128ff368e9c781a90fe3aa6b1015491f139bb7225737a04c27740"} Dec 03 09:10:18 crc kubenswrapper[4576]: I1203 09:10:18.336117 4576 generic.go:334] "Generic (PLEG): container finished" podID="15f066ad-3c0d-409b-9c47-e9a36cf6660d" containerID="b7b879cfa72af7f89db89e6aa4b2d37d11e6f186102c2a3955632e180fbd1bcd" exitCode=0 Dec 03 09:10:18 crc kubenswrapper[4576]: I1203 09:10:18.337349 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b78nr" event={"ID":"15f066ad-3c0d-409b-9c47-e9a36cf6660d","Type":"ContainerDied","Data":"b7b879cfa72af7f89db89e6aa4b2d37d11e6f186102c2a3955632e180fbd1bcd"} Dec 03 09:10:18 crc kubenswrapper[4576]: I1203 09:10:18.337383 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b78nr" event={"ID":"15f066ad-3c0d-409b-9c47-e9a36cf6660d","Type":"ContainerStarted","Data":"8ff7419530f86e7b6b880465a877e14bee3a36a27ba4ec6529c862ca0955bdc0"} Dec 03 09:10:18 crc kubenswrapper[4576]: I1203 09:10:18.337397 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Dec 03 09:10:18 crc kubenswrapper[4576]: I1203 09:10:18.370692 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-649tl"] Dec 03 09:10:18 crc kubenswrapper[4576]: I1203 09:10:18.413736 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Dec 03 09:10:18 crc kubenswrapper[4576]: I1203 09:10:18.433352 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=3.963484766 podStartE2EDuration="7.433329901s" podCreationTimestamp="2025-12-03 09:10:11 +0000 UTC" firstStartedPulling="2025-12-03 09:10:12.472828762 +0000 UTC m=+1819.858805746" lastFinishedPulling="2025-12-03 09:10:15.942673897 +0000 UTC m=+1823.328650881" observedRunningTime="2025-12-03 09:10:18.433112945 +0000 UTC m=+1825.819089929" watchObservedRunningTime="2025-12-03 09:10:18.433329901 +0000 UTC m=+1825.819306885" Dec 03 09:10:18 crc kubenswrapper[4576]: I1203 09:10:18.502361 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-bj2tg"] Dec 03 09:10:18 crc kubenswrapper[4576]: I1203 09:10:18.532599 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-bj2tg"] Dec 03 09:10:18 crc kubenswrapper[4576]: I1203 09:10:18.607006 4576 scope.go:117] "RemoveContainer" containerID="4feafa8312766f1d7128f3da04b325722870c7f1edcdd0aa9c7c1bd7896bced2" Dec 03 09:10:18 crc kubenswrapper[4576]: I1203 09:10:18.646722 4576 scope.go:117] "RemoveContainer" containerID="55c83df8a1b34e279d802a31aa68b9d18f0fb8b44b9cdef524beedb1303a8a59" Dec 03 09:10:18 crc kubenswrapper[4576]: E1203 09:10:18.647320 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"55c83df8a1b34e279d802a31aa68b9d18f0fb8b44b9cdef524beedb1303a8a59\": container with ID starting with 55c83df8a1b34e279d802a31aa68b9d18f0fb8b44b9cdef524beedb1303a8a59 not found: ID does not exist" containerID="55c83df8a1b34e279d802a31aa68b9d18f0fb8b44b9cdef524beedb1303a8a59" Dec 03 09:10:18 crc kubenswrapper[4576]: I1203 09:10:18.647364 4576 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"55c83df8a1b34e279d802a31aa68b9d18f0fb8b44b9cdef524beedb1303a8a59"} err="failed to get container status \"55c83df8a1b34e279d802a31aa68b9d18f0fb8b44b9cdef524beedb1303a8a59\": rpc error: code = NotFound desc = could not find container \"55c83df8a1b34e279d802a31aa68b9d18f0fb8b44b9cdef524beedb1303a8a59\": container with ID starting with 55c83df8a1b34e279d802a31aa68b9d18f0fb8b44b9cdef524beedb1303a8a59 not found: ID does not exist" Dec 03 09:10:18 crc kubenswrapper[4576]: I1203 09:10:18.647414 4576 scope.go:117] "RemoveContainer" containerID="4feafa8312766f1d7128f3da04b325722870c7f1edcdd0aa9c7c1bd7896bced2" Dec 03 09:10:18 crc kubenswrapper[4576]: E1203 09:10:18.647890 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4feafa8312766f1d7128f3da04b325722870c7f1edcdd0aa9c7c1bd7896bced2\": container with ID starting with 4feafa8312766f1d7128f3da04b325722870c7f1edcdd0aa9c7c1bd7896bced2 not found: ID does not exist" containerID="4feafa8312766f1d7128f3da04b325722870c7f1edcdd0aa9c7c1bd7896bced2" Dec 03 09:10:18 crc kubenswrapper[4576]: I1203 09:10:18.647917 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4feafa8312766f1d7128f3da04b325722870c7f1edcdd0aa9c7c1bd7896bced2"} err="failed to get container status \"4feafa8312766f1d7128f3da04b325722870c7f1edcdd0aa9c7c1bd7896bced2\": rpc error: code = NotFound desc = could not find container \"4feafa8312766f1d7128f3da04b325722870c7f1edcdd0aa9c7c1bd7896bced2\": container with ID starting with 4feafa8312766f1d7128f3da04b325722870c7f1edcdd0aa9c7c1bd7896bced2 not found: ID does not exist" Dec 03 09:10:18 crc kubenswrapper[4576]: I1203 09:10:18.659415 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Dec 03 09:10:18 crc kubenswrapper[4576]: I1203 09:10:18.694717 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8e0694dc-a49e-4136-a206-3bb5c8acd48c-etc-swift\") pod \"swift-storage-0\" (UID: \"8e0694dc-a49e-4136-a206-3bb5c8acd48c\") " pod="openstack/swift-storage-0" Dec 03 09:10:18 crc kubenswrapper[4576]: E1203 09:10:18.694943 4576 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 03 09:10:18 crc kubenswrapper[4576]: E1203 09:10:18.694968 4576 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 03 09:10:18 crc kubenswrapper[4576]: E1203 09:10:18.695030 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8e0694dc-a49e-4136-a206-3bb5c8acd48c-etc-swift podName:8e0694dc-a49e-4136-a206-3bb5c8acd48c nodeName:}" failed. No retries permitted until 2025-12-03 09:10:20.695010919 +0000 UTC m=+1828.080987903 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/8e0694dc-a49e-4136-a206-3bb5c8acd48c-etc-swift") pod "swift-storage-0" (UID: "8e0694dc-a49e-4136-a206-3bb5c8acd48c") : configmap "swift-ring-files" not found Dec 03 09:10:19 crc kubenswrapper[4576]: I1203 09:10:19.013579 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-666b6646f7-q744m" podUID="382f950f-abff-4951-a3d1-d1fdbae63e30" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.96:5353: i/o timeout" Dec 03 09:10:19 crc kubenswrapper[4576]: I1203 09:10:19.366709 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-649tl" event={"ID":"801bf024-9418-42e1-893f-0a4b82d411b4","Type":"ContainerStarted","Data":"a09fe7d150f4d26cee19e2f8ec7f350958846a5809bf1e6a4dbd024fb26fc318"} Dec 03 09:10:19 crc kubenswrapper[4576]: I1203 09:10:19.368675 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-dfdr5" event={"ID":"9d0cf9db-537a-41b4-bb17-d5f1a42f84e7","Type":"ContainerStarted","Data":"bf907d636153951783862262bfd32ff9c662ad26d21b8b1e9058b8569130f578"} Dec 03 09:10:19 crc kubenswrapper[4576]: I1203 09:10:19.368782 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b8fbc5445-dfdr5" Dec 03 09:10:19 crc kubenswrapper[4576]: I1203 09:10:19.392873 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-b8fbc5445-dfdr5" podStartSLOduration=4.392668909 podStartE2EDuration="4.392668909s" podCreationTimestamp="2025-12-03 09:10:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:10:19.389860342 +0000 UTC m=+1826.775837326" watchObservedRunningTime="2025-12-03 09:10:19.392668909 +0000 UTC m=+1826.778645893" Dec 03 09:10:19 crc kubenswrapper[4576]: I1203 09:10:19.693510 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f56f17c-5be1-457f-84ea-56ba8f1c632c" path="/var/lib/kubelet/pods/9f56f17c-5be1-457f-84ea-56ba8f1c632c/volumes" Dec 03 09:10:20 crc kubenswrapper[4576]: I1203 09:10:20.393605 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6p6d5" event={"ID":"4710c074-d5a4-4fdf-9d10-e0f0934c2e44","Type":"ContainerStarted","Data":"9c4f902b4f6f47edbb34a5d1709a51931f859ec90fb951677e13c4fe6d54f88a"} Dec 03 09:10:20 crc kubenswrapper[4576]: I1203 09:10:20.419421 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-6p6d5" podStartSLOduration=4.667748658 podStartE2EDuration="8.419399334s" podCreationTimestamp="2025-12-03 09:10:12 +0000 UTC" firstStartedPulling="2025-12-03 09:10:15.807958017 +0000 UTC m=+1823.193934991" lastFinishedPulling="2025-12-03 09:10:19.559608683 +0000 UTC m=+1826.945585667" observedRunningTime="2025-12-03 09:10:20.407348413 +0000 UTC m=+1827.793325417" watchObservedRunningTime="2025-12-03 09:10:20.419399334 +0000 UTC m=+1827.805376318" Dec 03 09:10:20 crc kubenswrapper[4576]: I1203 09:10:20.677138 4576 scope.go:117] "RemoveContainer" containerID="971057ff9d8dd948ea2baec3fb0fe13004ef1bdcb132acd7f101f25e8ebc2c91" Dec 03 09:10:20 crc kubenswrapper[4576]: E1203 09:10:20.677373 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting 
failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:10:20 crc kubenswrapper[4576]: I1203 09:10:20.734064 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8e0694dc-a49e-4136-a206-3bb5c8acd48c-etc-swift\") pod \"swift-storage-0\" (UID: \"8e0694dc-a49e-4136-a206-3bb5c8acd48c\") " pod="openstack/swift-storage-0" Dec 03 09:10:20 crc kubenswrapper[4576]: E1203 09:10:20.734243 4576 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 03 09:10:20 crc kubenswrapper[4576]: E1203 09:10:20.734294 4576 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 03 09:10:20 crc kubenswrapper[4576]: E1203 09:10:20.734344 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8e0694dc-a49e-4136-a206-3bb5c8acd48c-etc-swift podName:8e0694dc-a49e-4136-a206-3bb5c8acd48c nodeName:}" failed. No retries permitted until 2025-12-03 09:10:24.73432675 +0000 UTC m=+1832.120303734 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/8e0694dc-a49e-4136-a206-3bb5c8acd48c-etc-swift") pod "swift-storage-0" (UID: "8e0694dc-a49e-4136-a206-3bb5c8acd48c") : configmap "swift-ring-files" not found Dec 03 09:10:21 crc kubenswrapper[4576]: I1203 09:10:21.235608 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8554648995-rl6dz" Dec 03 09:10:21 crc kubenswrapper[4576]: I1203 09:10:21.924083 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Dec 03 09:10:22 crc kubenswrapper[4576]: I1203 09:10:22.026800 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.208393 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-899d-account-create-update-p4njf"] Dec 03 09:10:23 crc kubenswrapper[4576]: E1203 09:10:23.208825 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f56f17c-5be1-457f-84ea-56ba8f1c632c" containerName="init" Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.208838 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f56f17c-5be1-457f-84ea-56ba8f1c632c" containerName="init" Dec 03 09:10:23 crc kubenswrapper[4576]: E1203 09:10:23.208855 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f56f17c-5be1-457f-84ea-56ba8f1c632c" containerName="dnsmasq-dns" Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.208863 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f56f17c-5be1-457f-84ea-56ba8f1c632c" containerName="dnsmasq-dns" Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.209076 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f56f17c-5be1-457f-84ea-56ba8f1c632c" containerName="dnsmasq-dns" Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.209663 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-899d-account-create-update-p4njf" Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.214795 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.215466 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-899d-account-create-update-p4njf"] Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.271493 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-rvmn2"] Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.273344 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-rvmn2" Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.289754 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmxc4\" (UniqueName: \"kubernetes.io/projected/d9131a32-0712-419c-bf9e-3d3f6b9fa4cc-kube-api-access-xmxc4\") pod \"keystone-899d-account-create-update-p4njf\" (UID: \"d9131a32-0712-419c-bf9e-3d3f6b9fa4cc\") " pod="openstack/keystone-899d-account-create-update-p4njf" Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.290096 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-rvmn2"] Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.291975 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d9131a32-0712-419c-bf9e-3d3f6b9fa4cc-operator-scripts\") pod \"keystone-899d-account-create-update-p4njf\" (UID: \"d9131a32-0712-419c-bf9e-3d3f6b9fa4cc\") " pod="openstack/keystone-899d-account-create-update-p4njf" Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.358521 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-6p6d5" Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.358626 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-6p6d5" Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.393465 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d9131a32-0712-419c-bf9e-3d3f6b9fa4cc-operator-scripts\") pod \"keystone-899d-account-create-update-p4njf\" (UID: \"d9131a32-0712-419c-bf9e-3d3f6b9fa4cc\") " pod="openstack/keystone-899d-account-create-update-p4njf" Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.393586 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lm8np\" (UniqueName: \"kubernetes.io/projected/3bd81d26-be9f-433b-8ec3-12afd1d85eeb-kube-api-access-lm8np\") pod \"keystone-db-create-rvmn2\" (UID: \"3bd81d26-be9f-433b-8ec3-12afd1d85eeb\") " pod="openstack/keystone-db-create-rvmn2" Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.393649 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmxc4\" (UniqueName: \"kubernetes.io/projected/d9131a32-0712-419c-bf9e-3d3f6b9fa4cc-kube-api-access-xmxc4\") pod \"keystone-899d-account-create-update-p4njf\" (UID: \"d9131a32-0712-419c-bf9e-3d3f6b9fa4cc\") " pod="openstack/keystone-899d-account-create-update-p4njf" Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.393705 4576 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3bd81d26-be9f-433b-8ec3-12afd1d85eeb-operator-scripts\") pod \"keystone-db-create-rvmn2\" (UID: \"3bd81d26-be9f-433b-8ec3-12afd1d85eeb\") " pod="openstack/keystone-db-create-rvmn2" Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.394253 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d9131a32-0712-419c-bf9e-3d3f6b9fa4cc-operator-scripts\") pod \"keystone-899d-account-create-update-p4njf\" (UID: \"d9131a32-0712-419c-bf9e-3d3f6b9fa4cc\") " pod="openstack/keystone-899d-account-create-update-p4njf" Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.429010 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xmxc4\" (UniqueName: \"kubernetes.io/projected/d9131a32-0712-419c-bf9e-3d3f6b9fa4cc-kube-api-access-xmxc4\") pod \"keystone-899d-account-create-update-p4njf\" (UID: \"d9131a32-0712-419c-bf9e-3d3f6b9fa4cc\") " pod="openstack/keystone-899d-account-create-update-p4njf" Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.448272 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-6p6d5" Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.495344 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3bd81d26-be9f-433b-8ec3-12afd1d85eeb-operator-scripts\") pod \"keystone-db-create-rvmn2\" (UID: \"3bd81d26-be9f-433b-8ec3-12afd1d85eeb\") " pod="openstack/keystone-db-create-rvmn2" Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.498807 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lm8np\" (UniqueName: \"kubernetes.io/projected/3bd81d26-be9f-433b-8ec3-12afd1d85eeb-kube-api-access-lm8np\") pod \"keystone-db-create-rvmn2\" (UID: \"3bd81d26-be9f-433b-8ec3-12afd1d85eeb\") " pod="openstack/keystone-db-create-rvmn2" Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.498991 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3bd81d26-be9f-433b-8ec3-12afd1d85eeb-operator-scripts\") pod \"keystone-db-create-rvmn2\" (UID: \"3bd81d26-be9f-433b-8ec3-12afd1d85eeb\") " pod="openstack/keystone-db-create-rvmn2" Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.513613 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-tjp4r"] Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.514800 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-tjp4r" Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.522878 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lm8np\" (UniqueName: \"kubernetes.io/projected/3bd81d26-be9f-433b-8ec3-12afd1d85eeb-kube-api-access-lm8np\") pod \"keystone-db-create-rvmn2\" (UID: \"3bd81d26-be9f-433b-8ec3-12afd1d85eeb\") " pod="openstack/keystone-db-create-rvmn2" Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.523104 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-tjp4r"] Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.552168 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-899d-account-create-update-p4njf" Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.601150 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/02edecfe-6126-4456-bedd-6ce9a3f68ac7-operator-scripts\") pod \"placement-db-create-tjp4r\" (UID: \"02edecfe-6126-4456-bedd-6ce9a3f68ac7\") " pod="openstack/placement-db-create-tjp4r" Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.601323 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rn2cj\" (UniqueName: \"kubernetes.io/projected/02edecfe-6126-4456-bedd-6ce9a3f68ac7-kube-api-access-rn2cj\") pod \"placement-db-create-tjp4r\" (UID: \"02edecfe-6126-4456-bedd-6ce9a3f68ac7\") " pod="openstack/placement-db-create-tjp4r" Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.602985 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-rvmn2" Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.627147 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-1fd2-account-create-update-td6rc"] Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.628437 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-1fd2-account-create-update-td6rc" Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.634810 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.641639 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-1fd2-account-create-update-td6rc"] Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.702891 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rn2cj\" (UniqueName: \"kubernetes.io/projected/02edecfe-6126-4456-bedd-6ce9a3f68ac7-kube-api-access-rn2cj\") pod \"placement-db-create-tjp4r\" (UID: \"02edecfe-6126-4456-bedd-6ce9a3f68ac7\") " pod="openstack/placement-db-create-tjp4r" Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.702983 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zcf72\" (UniqueName: \"kubernetes.io/projected/ae79285c-f19f-4d6c-9f02-fe5b6d1de88b-kube-api-access-zcf72\") pod \"placement-1fd2-account-create-update-td6rc\" (UID: \"ae79285c-f19f-4d6c-9f02-fe5b6d1de88b\") " pod="openstack/placement-1fd2-account-create-update-td6rc" Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.703068 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/02edecfe-6126-4456-bedd-6ce9a3f68ac7-operator-scripts\") pod \"placement-db-create-tjp4r\" (UID: \"02edecfe-6126-4456-bedd-6ce9a3f68ac7\") " pod="openstack/placement-db-create-tjp4r" Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.703301 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ae79285c-f19f-4d6c-9f02-fe5b6d1de88b-operator-scripts\") pod \"placement-1fd2-account-create-update-td6rc\" (UID: \"ae79285c-f19f-4d6c-9f02-fe5b6d1de88b\") " pod="openstack/placement-1fd2-account-create-update-td6rc" Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.704111 4576 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/02edecfe-6126-4456-bedd-6ce9a3f68ac7-operator-scripts\") pod \"placement-db-create-tjp4r\" (UID: \"02edecfe-6126-4456-bedd-6ce9a3f68ac7\") " pod="openstack/placement-db-create-tjp4r" Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.724787 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rn2cj\" (UniqueName: \"kubernetes.io/projected/02edecfe-6126-4456-bedd-6ce9a3f68ac7-kube-api-access-rn2cj\") pod \"placement-db-create-tjp4r\" (UID: \"02edecfe-6126-4456-bedd-6ce9a3f68ac7\") " pod="openstack/placement-db-create-tjp4r" Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.804638 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zcf72\" (UniqueName: \"kubernetes.io/projected/ae79285c-f19f-4d6c-9f02-fe5b6d1de88b-kube-api-access-zcf72\") pod \"placement-1fd2-account-create-update-td6rc\" (UID: \"ae79285c-f19f-4d6c-9f02-fe5b6d1de88b\") " pod="openstack/placement-1fd2-account-create-update-td6rc" Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.805106 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ae79285c-f19f-4d6c-9f02-fe5b6d1de88b-operator-scripts\") pod \"placement-1fd2-account-create-update-td6rc\" (UID: \"ae79285c-f19f-4d6c-9f02-fe5b6d1de88b\") " pod="openstack/placement-1fd2-account-create-update-td6rc" Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.806163 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ae79285c-f19f-4d6c-9f02-fe5b6d1de88b-operator-scripts\") pod \"placement-1fd2-account-create-update-td6rc\" (UID: \"ae79285c-f19f-4d6c-9f02-fe5b6d1de88b\") " pod="openstack/placement-1fd2-account-create-update-td6rc" Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.820725 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zcf72\" (UniqueName: \"kubernetes.io/projected/ae79285c-f19f-4d6c-9f02-fe5b6d1de88b-kube-api-access-zcf72\") pod \"placement-1fd2-account-create-update-td6rc\" (UID: \"ae79285c-f19f-4d6c-9f02-fe5b6d1de88b\") " pod="openstack/placement-1fd2-account-create-update-td6rc" Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.874891 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-tjp4r" Dec 03 09:10:23 crc kubenswrapper[4576]: I1203 09:10:23.953676 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-1fd2-account-create-update-td6rc" Dec 03 09:10:24 crc kubenswrapper[4576]: I1203 09:10:24.544127 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-899d-account-create-update-p4njf"] Dec 03 09:10:24 crc kubenswrapper[4576]: I1203 09:10:24.735250 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8e0694dc-a49e-4136-a206-3bb5c8acd48c-etc-swift\") pod \"swift-storage-0\" (UID: \"8e0694dc-a49e-4136-a206-3bb5c8acd48c\") " pod="openstack/swift-storage-0" Dec 03 09:10:24 crc kubenswrapper[4576]: E1203 09:10:24.735543 4576 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 03 09:10:24 crc kubenswrapper[4576]: E1203 09:10:24.735580 4576 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 03 09:10:24 crc kubenswrapper[4576]: E1203 09:10:24.735705 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8e0694dc-a49e-4136-a206-3bb5c8acd48c-etc-swift podName:8e0694dc-a49e-4136-a206-3bb5c8acd48c nodeName:}" failed. No retries permitted until 2025-12-03 09:10:32.735647512 +0000 UTC m=+1840.121624496 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/8e0694dc-a49e-4136-a206-3bb5c8acd48c-etc-swift") pod "swift-storage-0" (UID: "8e0694dc-a49e-4136-a206-3bb5c8acd48c") : configmap "swift-ring-files" not found Dec 03 09:10:24 crc kubenswrapper[4576]: I1203 09:10:24.843297 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-rvmn2"] Dec 03 09:10:24 crc kubenswrapper[4576]: I1203 09:10:24.853480 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-tjp4r"] Dec 03 09:10:24 crc kubenswrapper[4576]: W1203 09:10:24.855418 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3bd81d26_be9f_433b_8ec3_12afd1d85eeb.slice/crio-7254980831b76eeffd4121f25a8cdf83e528cc7da206b42e949c44743f1952cf WatchSource:0}: Error finding container 7254980831b76eeffd4121f25a8cdf83e528cc7da206b42e949c44743f1952cf: Status 404 returned error can't find the container with id 7254980831b76eeffd4121f25a8cdf83e528cc7da206b42e949c44743f1952cf Dec 03 09:10:24 crc kubenswrapper[4576]: I1203 09:10:24.859937 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-1fd2-account-create-update-td6rc"] Dec 03 09:10:24 crc kubenswrapper[4576]: W1203 09:10:24.868819 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod02edecfe_6126_4456_bedd_6ce9a3f68ac7.slice/crio-9c5240053fcc698cdf121784e0a1705a4ac97f85889149b600754088df2992d4 WatchSource:0}: Error finding container 9c5240053fcc698cdf121784e0a1705a4ac97f85889149b600754088df2992d4: Status 404 returned error can't find the container with id 9c5240053fcc698cdf121784e0a1705a4ac97f85889149b600754088df2992d4 Dec 03 09:10:24 crc kubenswrapper[4576]: W1203 09:10:24.872569 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podae79285c_f19f_4d6c_9f02_fe5b6d1de88b.slice/crio-9fddef6b48f2e750283d6e1f001c2e1d3749d9649c9eda918940bd831c81e8ab WatchSource:0}: Error finding 
container 9fddef6b48f2e750283d6e1f001c2e1d3749d9649c9eda918940bd831c81e8ab: Status 404 returned error can't find the container with id 9fddef6b48f2e750283d6e1f001c2e1d3749d9649c9eda918940bd831c81e8ab Dec 03 09:10:25 crc kubenswrapper[4576]: I1203 09:10:25.457103 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-tjp4r" event={"ID":"02edecfe-6126-4456-bedd-6ce9a3f68ac7","Type":"ContainerStarted","Data":"2b88a3596a300fdd6c4f6641b9ef789e1d5f6985788947d6f60437277f131594"} Dec 03 09:10:25 crc kubenswrapper[4576]: I1203 09:10:25.457157 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-tjp4r" event={"ID":"02edecfe-6126-4456-bedd-6ce9a3f68ac7","Type":"ContainerStarted","Data":"9c5240053fcc698cdf121784e0a1705a4ac97f85889149b600754088df2992d4"} Dec 03 09:10:25 crc kubenswrapper[4576]: I1203 09:10:25.458208 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-rvmn2" event={"ID":"3bd81d26-be9f-433b-8ec3-12afd1d85eeb","Type":"ContainerStarted","Data":"7254980831b76eeffd4121f25a8cdf83e528cc7da206b42e949c44743f1952cf"} Dec 03 09:10:25 crc kubenswrapper[4576]: I1203 09:10:25.459760 4576 generic.go:334] "Generic (PLEG): container finished" podID="d9131a32-0712-419c-bf9e-3d3f6b9fa4cc" containerID="b7fb0b79388d2b032c02ea9d3b5ba3bf2b6329c37f653c7f2ea45d59da08ba36" exitCode=0 Dec 03 09:10:25 crc kubenswrapper[4576]: I1203 09:10:25.460270 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-899d-account-create-update-p4njf" event={"ID":"d9131a32-0712-419c-bf9e-3d3f6b9fa4cc","Type":"ContainerDied","Data":"b7fb0b79388d2b032c02ea9d3b5ba3bf2b6329c37f653c7f2ea45d59da08ba36"} Dec 03 09:10:25 crc kubenswrapper[4576]: I1203 09:10:25.460306 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-899d-account-create-update-p4njf" event={"ID":"d9131a32-0712-419c-bf9e-3d3f6b9fa4cc","Type":"ContainerStarted","Data":"6554691ae57c276ad7f8ef40b78dda524e7c6fef83bca238e9e9ffb9a6d5e215"} Dec 03 09:10:25 crc kubenswrapper[4576]: I1203 09:10:25.464421 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-649tl" event={"ID":"801bf024-9418-42e1-893f-0a4b82d411b4","Type":"ContainerStarted","Data":"dc9c6166efda1fe8bc600a8503a3a36d653e21cf1ef6c336eac9b9fba5c0d641"} Dec 03 09:10:25 crc kubenswrapper[4576]: I1203 09:10:25.470693 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-1fd2-account-create-update-td6rc" event={"ID":"ae79285c-f19f-4d6c-9f02-fe5b6d1de88b","Type":"ContainerStarted","Data":"9fddef6b48f2e750283d6e1f001c2e1d3749d9649c9eda918940bd831c81e8ab"} Dec 03 09:10:25 crc kubenswrapper[4576]: I1203 09:10:25.504730 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-649tl" podStartSLOduration=2.70297128 podStartE2EDuration="8.504704709s" podCreationTimestamp="2025-12-03 09:10:17 +0000 UTC" firstStartedPulling="2025-12-03 09:10:18.444101487 +0000 UTC m=+1825.830078471" lastFinishedPulling="2025-12-03 09:10:24.245834916 +0000 UTC m=+1831.631811900" observedRunningTime="2025-12-03 09:10:25.495294491 +0000 UTC m=+1832.881271485" watchObservedRunningTime="2025-12-03 09:10:25.504704709 +0000 UTC m=+1832.890681693" Dec 03 09:10:26 crc kubenswrapper[4576]: I1203 09:10:26.298843 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-b8fbc5445-dfdr5" Dec 03 09:10:26 crc kubenswrapper[4576]: I1203 09:10:26.361845 
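
The three "Failed to process watch event ... can't find the container with id ..." warnings above resolve themselves within about a second: the same IDs (7254..., 9c52..., 9fdd...) reappear in ContainerStarted PLEG events at 09:10:25, so the cgroup watch simply raced ahead of container creation. A small sketch of that correlation (Python; the log path is an assumption):

    import re
    import sys

    MISSING = re.compile(r"can't find the container with id ([0-9a-f]{64})")
    STARTED = re.compile(r'"Type":"ContainerStarted","Data":"([0-9a-f]{64})"')

    def correlate(lines):
        """Pair 'container not found' watch warnings with later ContainerStarted events."""
        missing, started = set(), set()
        for line in lines:
            missing.update(MISSING.findall(line))
            started.update(STARTED.findall(line))
        return sorted(missing & started), sorted(missing - started)

    if __name__ == "__main__":
        with open(sys.argv[1], encoding="utf-8", errors="replace") as fh:
            resolved, unresolved = correlate(fh)
        print("raced but started later:", *(cid[:12] for cid in resolved))
        print("never seen starting:   ", *(cid[:12] for cid in unresolved))

Only IDs that never show up in a later ContainerStarted event would point at a real runtime problem rather than a benign race.
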
4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-rl6dz"] Dec 03 09:10:26 crc kubenswrapper[4576]: I1203 09:10:26.362085 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8554648995-rl6dz" podUID="64ba7793-75a1-4629-878c-9c39ed432ecc" containerName="dnsmasq-dns" containerID="cri-o://7205b64365f2278c35b093827d946ffa68df66e83aa5c726909c5ff203f4bc5c" gracePeriod=10 Dec 03 09:10:26 crc kubenswrapper[4576]: I1203 09:10:26.490091 4576 generic.go:334] "Generic (PLEG): container finished" podID="ae79285c-f19f-4d6c-9f02-fe5b6d1de88b" containerID="67c805e03d4cdae5ff7e70e7aa7833be737d40fb4b9666116427425c319dc94c" exitCode=0 Dec 03 09:10:26 crc kubenswrapper[4576]: I1203 09:10:26.490539 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-1fd2-account-create-update-td6rc" event={"ID":"ae79285c-f19f-4d6c-9f02-fe5b6d1de88b","Type":"ContainerDied","Data":"67c805e03d4cdae5ff7e70e7aa7833be737d40fb4b9666116427425c319dc94c"} Dec 03 09:10:26 crc kubenswrapper[4576]: I1203 09:10:26.495597 4576 generic.go:334] "Generic (PLEG): container finished" podID="02edecfe-6126-4456-bedd-6ce9a3f68ac7" containerID="2b88a3596a300fdd6c4f6641b9ef789e1d5f6985788947d6f60437277f131594" exitCode=0 Dec 03 09:10:26 crc kubenswrapper[4576]: I1203 09:10:26.495663 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-tjp4r" event={"ID":"02edecfe-6126-4456-bedd-6ce9a3f68ac7","Type":"ContainerDied","Data":"2b88a3596a300fdd6c4f6641b9ef789e1d5f6985788947d6f60437277f131594"} Dec 03 09:10:26 crc kubenswrapper[4576]: I1203 09:10:26.497450 4576 generic.go:334] "Generic (PLEG): container finished" podID="3bd81d26-be9f-433b-8ec3-12afd1d85eeb" containerID="f7eb83e5597f9f67358b7a9851c89a69975f992eec0384bc07aa412b0ab15a36" exitCode=0 Dec 03 09:10:26 crc kubenswrapper[4576]: I1203 09:10:26.497496 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-rvmn2" event={"ID":"3bd81d26-be9f-433b-8ec3-12afd1d85eeb","Type":"ContainerDied","Data":"f7eb83e5597f9f67358b7a9851c89a69975f992eec0384bc07aa412b0ab15a36"} Dec 03 09:10:26 crc kubenswrapper[4576]: I1203 09:10:26.500275 4576 generic.go:334] "Generic (PLEG): container finished" podID="64ba7793-75a1-4629-878c-9c39ed432ecc" containerID="7205b64365f2278c35b093827d946ffa68df66e83aa5c726909c5ff203f4bc5c" exitCode=0 Dec 03 09:10:26 crc kubenswrapper[4576]: I1203 09:10:26.500316 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-rl6dz" event={"ID":"64ba7793-75a1-4629-878c-9c39ed432ecc","Type":"ContainerDied","Data":"7205b64365f2278c35b093827d946ffa68df66e83aa5c726909c5ff203f4bc5c"} Dec 03 09:10:26 crc kubenswrapper[4576]: I1203 09:10:26.505456 4576 generic.go:334] "Generic (PLEG): container finished" podID="6d6451b7-d2df-487c-afa3-3f1e56758ce4" containerID="20c7cdb1b03745675354ae5189269b69daad4f5c2b1e25e038f123970827d1b8" exitCode=0 Dec 03 09:10:26 crc kubenswrapper[4576]: I1203 09:10:26.506807 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"6d6451b7-d2df-487c-afa3-3f1e56758ce4","Type":"ContainerDied","Data":"20c7cdb1b03745675354ae5189269b69daad4f5c2b1e25e038f123970827d1b8"} Dec 03 09:10:27 crc kubenswrapper[4576]: I1203 09:10:27.082443 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Dec 03 09:10:27 crc kubenswrapper[4576]: I1203 09:10:27.533217 4576 generic.go:334] 
"Generic (PLEG): container finished" podID="c20e6200-091a-47c3-afef-d1b4d9538309" containerID="35f9490152a4739453e519a118067b6d218f1ac074563732fa6b9864d8353092" exitCode=0 Dec 03 09:10:27 crc kubenswrapper[4576]: I1203 09:10:27.533270 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c20e6200-091a-47c3-afef-d1b4d9538309","Type":"ContainerDied","Data":"35f9490152a4739453e519a118067b6d218f1ac074563732fa6b9864d8353092"} Dec 03 09:10:28 crc kubenswrapper[4576]: I1203 09:10:28.790818 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-l5979"] Dec 03 09:10:28 crc kubenswrapper[4576]: I1203 09:10:28.792074 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-l5979" Dec 03 09:10:28 crc kubenswrapper[4576]: I1203 09:10:28.800139 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-l5979"] Dec 03 09:10:28 crc kubenswrapper[4576]: I1203 09:10:28.907656 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-2f64-account-create-update-22w48"] Dec 03 09:10:28 crc kubenswrapper[4576]: I1203 09:10:28.908957 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-2f64-account-create-update-22w48" Dec 03 09:10:28 crc kubenswrapper[4576]: I1203 09:10:28.913302 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Dec 03 09:10:28 crc kubenswrapper[4576]: I1203 09:10:28.927519 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ft6gn\" (UniqueName: \"kubernetes.io/projected/cbed30d2-7863-4bab-9d8f-56a7b3650b47-kube-api-access-ft6gn\") pod \"glance-db-create-l5979\" (UID: \"cbed30d2-7863-4bab-9d8f-56a7b3650b47\") " pod="openstack/glance-db-create-l5979" Dec 03 09:10:28 crc kubenswrapper[4576]: I1203 09:10:28.927609 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cbed30d2-7863-4bab-9d8f-56a7b3650b47-operator-scripts\") pod \"glance-db-create-l5979\" (UID: \"cbed30d2-7863-4bab-9d8f-56a7b3650b47\") " pod="openstack/glance-db-create-l5979" Dec 03 09:10:28 crc kubenswrapper[4576]: I1203 09:10:28.940291 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-2f64-account-create-update-22w48"] Dec 03 09:10:29 crc kubenswrapper[4576]: I1203 09:10:29.029105 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/30211fb8-a5e9-4d09-ab11-ca3e35b5ba92-operator-scripts\") pod \"glance-2f64-account-create-update-22w48\" (UID: \"30211fb8-a5e9-4d09-ab11-ca3e35b5ba92\") " pod="openstack/glance-2f64-account-create-update-22w48" Dec 03 09:10:29 crc kubenswrapper[4576]: I1203 09:10:29.029155 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbvxf\" (UniqueName: \"kubernetes.io/projected/30211fb8-a5e9-4d09-ab11-ca3e35b5ba92-kube-api-access-rbvxf\") pod \"glance-2f64-account-create-update-22w48\" (UID: \"30211fb8-a5e9-4d09-ab11-ca3e35b5ba92\") " pod="openstack/glance-2f64-account-create-update-22w48" Dec 03 09:10:29 crc kubenswrapper[4576]: I1203 09:10:29.029241 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ft6gn\" (UniqueName: 
\"kubernetes.io/projected/cbed30d2-7863-4bab-9d8f-56a7b3650b47-kube-api-access-ft6gn\") pod \"glance-db-create-l5979\" (UID: \"cbed30d2-7863-4bab-9d8f-56a7b3650b47\") " pod="openstack/glance-db-create-l5979" Dec 03 09:10:29 crc kubenswrapper[4576]: I1203 09:10:29.029838 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cbed30d2-7863-4bab-9d8f-56a7b3650b47-operator-scripts\") pod \"glance-db-create-l5979\" (UID: \"cbed30d2-7863-4bab-9d8f-56a7b3650b47\") " pod="openstack/glance-db-create-l5979" Dec 03 09:10:29 crc kubenswrapper[4576]: I1203 09:10:29.030751 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cbed30d2-7863-4bab-9d8f-56a7b3650b47-operator-scripts\") pod \"glance-db-create-l5979\" (UID: \"cbed30d2-7863-4bab-9d8f-56a7b3650b47\") " pod="openstack/glance-db-create-l5979" Dec 03 09:10:29 crc kubenswrapper[4576]: I1203 09:10:29.047710 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ft6gn\" (UniqueName: \"kubernetes.io/projected/cbed30d2-7863-4bab-9d8f-56a7b3650b47-kube-api-access-ft6gn\") pod \"glance-db-create-l5979\" (UID: \"cbed30d2-7863-4bab-9d8f-56a7b3650b47\") " pod="openstack/glance-db-create-l5979" Dec 03 09:10:29 crc kubenswrapper[4576]: I1203 09:10:29.124692 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-l5979" Dec 03 09:10:29 crc kubenswrapper[4576]: I1203 09:10:29.131904 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/30211fb8-a5e9-4d09-ab11-ca3e35b5ba92-operator-scripts\") pod \"glance-2f64-account-create-update-22w48\" (UID: \"30211fb8-a5e9-4d09-ab11-ca3e35b5ba92\") " pod="openstack/glance-2f64-account-create-update-22w48" Dec 03 09:10:29 crc kubenswrapper[4576]: I1203 09:10:29.133038 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbvxf\" (UniqueName: \"kubernetes.io/projected/30211fb8-a5e9-4d09-ab11-ca3e35b5ba92-kube-api-access-rbvxf\") pod \"glance-2f64-account-create-update-22w48\" (UID: \"30211fb8-a5e9-4d09-ab11-ca3e35b5ba92\") " pod="openstack/glance-2f64-account-create-update-22w48" Dec 03 09:10:29 crc kubenswrapper[4576]: I1203 09:10:29.132991 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/30211fb8-a5e9-4d09-ab11-ca3e35b5ba92-operator-scripts\") pod \"glance-2f64-account-create-update-22w48\" (UID: \"30211fb8-a5e9-4d09-ab11-ca3e35b5ba92\") " pod="openstack/glance-2f64-account-create-update-22w48" Dec 03 09:10:29 crc kubenswrapper[4576]: I1203 09:10:29.152685 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rbvxf\" (UniqueName: \"kubernetes.io/projected/30211fb8-a5e9-4d09-ab11-ca3e35b5ba92-kube-api-access-rbvxf\") pod \"glance-2f64-account-create-update-22w48\" (UID: \"30211fb8-a5e9-4d09-ab11-ca3e35b5ba92\") " pod="openstack/glance-2f64-account-create-update-22w48" Dec 03 09:10:29 crc kubenswrapper[4576]: I1203 09:10:29.232102 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-2f64-account-create-update-22w48" Dec 03 09:10:31 crc kubenswrapper[4576]: I1203 09:10:31.234963 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-8554648995-rl6dz" podUID="64ba7793-75a1-4629-878c-9c39ed432ecc" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.110:5353: connect: connection refused" Dec 03 09:10:32 crc kubenswrapper[4576]: I1203 09:10:32.576607 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-rvmn2" event={"ID":"3bd81d26-be9f-433b-8ec3-12afd1d85eeb","Type":"ContainerDied","Data":"7254980831b76eeffd4121f25a8cdf83e528cc7da206b42e949c44743f1952cf"} Dec 03 09:10:32 crc kubenswrapper[4576]: I1203 09:10:32.576933 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7254980831b76eeffd4121f25a8cdf83e528cc7da206b42e949c44743f1952cf" Dec 03 09:10:32 crc kubenswrapper[4576]: I1203 09:10:32.677383 4576 scope.go:117] "RemoveContainer" containerID="971057ff9d8dd948ea2baec3fb0fe13004ef1bdcb132acd7f101f25e8ebc2c91" Dec 03 09:10:32 crc kubenswrapper[4576]: E1203 09:10:32.677782 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:10:32 crc kubenswrapper[4576]: I1203 09:10:32.823749 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8e0694dc-a49e-4136-a206-3bb5c8acd48c-etc-swift\") pod \"swift-storage-0\" (UID: \"8e0694dc-a49e-4136-a206-3bb5c8acd48c\") " pod="openstack/swift-storage-0" Dec 03 09:10:32 crc kubenswrapper[4576]: E1203 09:10:32.823900 4576 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 03 09:10:32 crc kubenswrapper[4576]: E1203 09:10:32.823914 4576 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 03 09:10:32 crc kubenswrapper[4576]: E1203 09:10:32.823959 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8e0694dc-a49e-4136-a206-3bb5c8acd48c-etc-swift podName:8e0694dc-a49e-4136-a206-3bb5c8acd48c nodeName:}" failed. No retries permitted until 2025-12-03 09:10:48.823944316 +0000 UTC m=+1856.209921290 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/8e0694dc-a49e-4136-a206-3bb5c8acd48c-etc-swift") pod "swift-storage-0" (UID: "8e0694dc-a49e-4136-a206-3bb5c8acd48c") : configmap "swift-ring-files" not found Dec 03 09:10:32 crc kubenswrapper[4576]: I1203 09:10:32.856328 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-rvmn2" Dec 03 09:10:32 crc kubenswrapper[4576]: I1203 09:10:32.879222 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-tjp4r" Dec 03 09:10:32 crc kubenswrapper[4576]: I1203 09:10:32.901179 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-899d-account-create-update-p4njf" Dec 03 09:10:32 crc kubenswrapper[4576]: I1203 09:10:32.925192 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/02edecfe-6126-4456-bedd-6ce9a3f68ac7-operator-scripts\") pod \"02edecfe-6126-4456-bedd-6ce9a3f68ac7\" (UID: \"02edecfe-6126-4456-bedd-6ce9a3f68ac7\") " Dec 03 09:10:32 crc kubenswrapper[4576]: I1203 09:10:32.925283 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3bd81d26-be9f-433b-8ec3-12afd1d85eeb-operator-scripts\") pod \"3bd81d26-be9f-433b-8ec3-12afd1d85eeb\" (UID: \"3bd81d26-be9f-433b-8ec3-12afd1d85eeb\") " Dec 03 09:10:32 crc kubenswrapper[4576]: I1203 09:10:32.925364 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lm8np\" (UniqueName: \"kubernetes.io/projected/3bd81d26-be9f-433b-8ec3-12afd1d85eeb-kube-api-access-lm8np\") pod \"3bd81d26-be9f-433b-8ec3-12afd1d85eeb\" (UID: \"3bd81d26-be9f-433b-8ec3-12afd1d85eeb\") " Dec 03 09:10:32 crc kubenswrapper[4576]: I1203 09:10:32.925391 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rn2cj\" (UniqueName: \"kubernetes.io/projected/02edecfe-6126-4456-bedd-6ce9a3f68ac7-kube-api-access-rn2cj\") pod \"02edecfe-6126-4456-bedd-6ce9a3f68ac7\" (UID: \"02edecfe-6126-4456-bedd-6ce9a3f68ac7\") " Dec 03 09:10:32 crc kubenswrapper[4576]: I1203 09:10:32.927727 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3bd81d26-be9f-433b-8ec3-12afd1d85eeb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3bd81d26-be9f-433b-8ec3-12afd1d85eeb" (UID: "3bd81d26-be9f-433b-8ec3-12afd1d85eeb"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:10:32 crc kubenswrapper[4576]: I1203 09:10:32.929267 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/02edecfe-6126-4456-bedd-6ce9a3f68ac7-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "02edecfe-6126-4456-bedd-6ce9a3f68ac7" (UID: "02edecfe-6126-4456-bedd-6ce9a3f68ac7"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:10:32 crc kubenswrapper[4576]: I1203 09:10:32.939007 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-1fd2-account-create-update-td6rc" Dec 03 09:10:32 crc kubenswrapper[4576]: I1203 09:10:32.943213 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02edecfe-6126-4456-bedd-6ce9a3f68ac7-kube-api-access-rn2cj" (OuterVolumeSpecName: "kube-api-access-rn2cj") pod "02edecfe-6126-4456-bedd-6ce9a3f68ac7" (UID: "02edecfe-6126-4456-bedd-6ce9a3f68ac7"). InnerVolumeSpecName "kube-api-access-rn2cj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:10:32 crc kubenswrapper[4576]: I1203 09:10:32.943641 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-rl6dz" Dec 03 09:10:32 crc kubenswrapper[4576]: I1203 09:10:32.954153 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3bd81d26-be9f-433b-8ec3-12afd1d85eeb-kube-api-access-lm8np" (OuterVolumeSpecName: "kube-api-access-lm8np") pod "3bd81d26-be9f-433b-8ec3-12afd1d85eeb" (UID: "3bd81d26-be9f-433b-8ec3-12afd1d85eeb"). InnerVolumeSpecName "kube-api-access-lm8np". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.026704 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkqlf\" (UniqueName: \"kubernetes.io/projected/64ba7793-75a1-4629-878c-9c39ed432ecc-kube-api-access-zkqlf\") pod \"64ba7793-75a1-4629-878c-9c39ed432ecc\" (UID: \"64ba7793-75a1-4629-878c-9c39ed432ecc\") " Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.026865 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ae79285c-f19f-4d6c-9f02-fe5b6d1de88b-operator-scripts\") pod \"ae79285c-f19f-4d6c-9f02-fe5b6d1de88b\" (UID: \"ae79285c-f19f-4d6c-9f02-fe5b6d1de88b\") " Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.026900 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/64ba7793-75a1-4629-878c-9c39ed432ecc-ovsdbserver-sb\") pod \"64ba7793-75a1-4629-878c-9c39ed432ecc\" (UID: \"64ba7793-75a1-4629-878c-9c39ed432ecc\") " Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.026925 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d9131a32-0712-419c-bf9e-3d3f6b9fa4cc-operator-scripts\") pod \"d9131a32-0712-419c-bf9e-3d3f6b9fa4cc\" (UID: \"d9131a32-0712-419c-bf9e-3d3f6b9fa4cc\") " Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.026980 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/64ba7793-75a1-4629-878c-9c39ed432ecc-ovsdbserver-nb\") pod \"64ba7793-75a1-4629-878c-9c39ed432ecc\" (UID: \"64ba7793-75a1-4629-878c-9c39ed432ecc\") " Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.027008 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zcf72\" (UniqueName: \"kubernetes.io/projected/ae79285c-f19f-4d6c-9f02-fe5b6d1de88b-kube-api-access-zcf72\") pod \"ae79285c-f19f-4d6c-9f02-fe5b6d1de88b\" (UID: \"ae79285c-f19f-4d6c-9f02-fe5b6d1de88b\") " Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.027106 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64ba7793-75a1-4629-878c-9c39ed432ecc-config\") pod \"64ba7793-75a1-4629-878c-9c39ed432ecc\" (UID: \"64ba7793-75a1-4629-878c-9c39ed432ecc\") " Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.027145 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/64ba7793-75a1-4629-878c-9c39ed432ecc-dns-svc\") pod \"64ba7793-75a1-4629-878c-9c39ed432ecc\" (UID: \"64ba7793-75a1-4629-878c-9c39ed432ecc\") " Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.027193 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xmxc4\" 
(UniqueName: \"kubernetes.io/projected/d9131a32-0712-419c-bf9e-3d3f6b9fa4cc-kube-api-access-xmxc4\") pod \"d9131a32-0712-419c-bf9e-3d3f6b9fa4cc\" (UID: \"d9131a32-0712-419c-bf9e-3d3f6b9fa4cc\") " Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.028845 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rn2cj\" (UniqueName: \"kubernetes.io/projected/02edecfe-6126-4456-bedd-6ce9a3f68ac7-kube-api-access-rn2cj\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.028909 4576 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/02edecfe-6126-4456-bedd-6ce9a3f68ac7-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.028928 4576 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3bd81d26-be9f-433b-8ec3-12afd1d85eeb-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.028941 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lm8np\" (UniqueName: \"kubernetes.io/projected/3bd81d26-be9f-433b-8ec3-12afd1d85eeb-kube-api-access-lm8np\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.037130 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ae79285c-f19f-4d6c-9f02-fe5b6d1de88b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ae79285c-f19f-4d6c-9f02-fe5b6d1de88b" (UID: "ae79285c-f19f-4d6c-9f02-fe5b6d1de88b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.037401 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9131a32-0712-419c-bf9e-3d3f6b9fa4cc-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d9131a32-0712-419c-bf9e-3d3f6b9fa4cc" (UID: "d9131a32-0712-419c-bf9e-3d3f6b9fa4cc"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.069206 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9131a32-0712-419c-bf9e-3d3f6b9fa4cc-kube-api-access-xmxc4" (OuterVolumeSpecName: "kube-api-access-xmxc4") pod "d9131a32-0712-419c-bf9e-3d3f6b9fa4cc" (UID: "d9131a32-0712-419c-bf9e-3d3f6b9fa4cc"). InnerVolumeSpecName "kube-api-access-xmxc4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.069274 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae79285c-f19f-4d6c-9f02-fe5b6d1de88b-kube-api-access-zcf72" (OuterVolumeSpecName: "kube-api-access-zcf72") pod "ae79285c-f19f-4d6c-9f02-fe5b6d1de88b" (UID: "ae79285c-f19f-4d6c-9f02-fe5b6d1de88b"). InnerVolumeSpecName "kube-api-access-zcf72". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.069820 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64ba7793-75a1-4629-878c-9c39ed432ecc-kube-api-access-zkqlf" (OuterVolumeSpecName: "kube-api-access-zkqlf") pod "64ba7793-75a1-4629-878c-9c39ed432ecc" (UID: "64ba7793-75a1-4629-878c-9c39ed432ecc"). InnerVolumeSpecName "kube-api-access-zkqlf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.111355 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64ba7793-75a1-4629-878c-9c39ed432ecc-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "64ba7793-75a1-4629-878c-9c39ed432ecc" (UID: "64ba7793-75a1-4629-878c-9c39ed432ecc"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.125495 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64ba7793-75a1-4629-878c-9c39ed432ecc-config" (OuterVolumeSpecName: "config") pod "64ba7793-75a1-4629-878c-9c39ed432ecc" (UID: "64ba7793-75a1-4629-878c-9c39ed432ecc"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.137721 4576 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64ba7793-75a1-4629-878c-9c39ed432ecc-config\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.137762 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xmxc4\" (UniqueName: \"kubernetes.io/projected/d9131a32-0712-419c-bf9e-3d3f6b9fa4cc-kube-api-access-xmxc4\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.137774 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkqlf\" (UniqueName: \"kubernetes.io/projected/64ba7793-75a1-4629-878c-9c39ed432ecc-kube-api-access-zkqlf\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.137787 4576 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ae79285c-f19f-4d6c-9f02-fe5b6d1de88b-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.137798 4576 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/64ba7793-75a1-4629-878c-9c39ed432ecc-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.137808 4576 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d9131a32-0712-419c-bf9e-3d3f6b9fa4cc-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.137816 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zcf72\" (UniqueName: \"kubernetes.io/projected/ae79285c-f19f-4d6c-9f02-fe5b6d1de88b-kube-api-access-zcf72\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.157920 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64ba7793-75a1-4629-878c-9c39ed432ecc-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "64ba7793-75a1-4629-878c-9c39ed432ecc" (UID: "64ba7793-75a1-4629-878c-9c39ed432ecc"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.162278 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64ba7793-75a1-4629-878c-9c39ed432ecc-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "64ba7793-75a1-4629-878c-9c39ed432ecc" (UID: "64ba7793-75a1-4629-878c-9c39ed432ecc"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.234601 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-l5979"] Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.238631 4576 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/64ba7793-75a1-4629-878c-9c39ed432ecc-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.238651 4576 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/64ba7793-75a1-4629-878c-9c39ed432ecc-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.402938 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-2f64-account-create-update-22w48"] Dec 03 09:10:33 crc kubenswrapper[4576]: W1203 09:10:33.445976 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod30211fb8_a5e9_4d09_ab11_ca3e35b5ba92.slice/crio-c7fc2f50b4583d0d2e8940da4db7229463288d158235986be9159a6d6741113b WatchSource:0}: Error finding container c7fc2f50b4583d0d2e8940da4db7229463288d158235986be9159a6d6741113b: Status 404 returned error can't find the container with id c7fc2f50b4583d0d2e8940da4db7229463288d158235986be9159a6d6741113b Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.473182 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-6p6d5" Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.544824 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6p6d5"] Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.587777 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-l5979" event={"ID":"cbed30d2-7863-4bab-9d8f-56a7b3650b47","Type":"ContainerStarted","Data":"b069ba5c4c73154d942e0416298314a52731ad22978974cb680553f4b9db2321"} Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.594955 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-899d-account-create-update-p4njf" event={"ID":"d9131a32-0712-419c-bf9e-3d3f6b9fa4cc","Type":"ContainerDied","Data":"6554691ae57c276ad7f8ef40b78dda524e7c6fef83bca238e9e9ffb9a6d5e215"} Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.594989 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6554691ae57c276ad7f8ef40b78dda524e7c6fef83bca238e9e9ffb9a6d5e215" Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.595432 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-899d-account-create-update-p4njf" Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.596907 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"6d6451b7-d2df-487c-afa3-3f1e56758ce4","Type":"ContainerStarted","Data":"a722bfc3fa26a7f568a1e342ec5b26478d7491b99e396ab25fbe73bd5d94d346"} Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.598351 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.600560 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c20e6200-091a-47c3-afef-d1b4d9538309","Type":"ContainerStarted","Data":"cc9f6bef3fd0728bfe32fd0ac02369d31893e294e46b78cad2f1ed3e07c33ce5"} Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.601085 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.603770 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-tjp4r" Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.606626 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-tjp4r" event={"ID":"02edecfe-6126-4456-bedd-6ce9a3f68ac7","Type":"ContainerDied","Data":"9c5240053fcc698cdf121784e0a1705a4ac97f85889149b600754088df2992d4"} Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.606662 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9c5240053fcc698cdf121784e0a1705a4ac97f85889149b600754088df2992d4" Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.606678 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-2f64-account-create-update-22w48" event={"ID":"30211fb8-a5e9-4d09-ab11-ca3e35b5ba92","Type":"ContainerStarted","Data":"c7fc2f50b4583d0d2e8940da4db7229463288d158235986be9159a6d6741113b"} Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.606899 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-rl6dz" event={"ID":"64ba7793-75a1-4629-878c-9c39ed432ecc","Type":"ContainerDied","Data":"7f28f168b85f1441a031dac79a0bfc9a4772ba77bbe5f29f9ef5b4740a69d2bd"} Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.606938 4576 scope.go:117] "RemoveContainer" containerID="7205b64365f2278c35b093827d946ffa68df66e83aa5c726909c5ff203f4bc5c" Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.607050 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-rl6dz" Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.620424 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b78nr" event={"ID":"15f066ad-3c0d-409b-9c47-e9a36cf6660d","Type":"ContainerStarted","Data":"f06d7be05a98c6de8f93da5276e18f5c38ce935018abaa854fdfc838605eb715"} Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.625904 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-rvmn2" Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.626779 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-1fd2-account-create-update-td6rc" event={"ID":"ae79285c-f19f-4d6c-9f02-fe5b6d1de88b","Type":"ContainerDied","Data":"9fddef6b48f2e750283d6e1f001c2e1d3749d9649c9eda918940bd831c81e8ab"} Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.626845 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9fddef6b48f2e750283d6e1f001c2e1d3749d9649c9eda918940bd831c81e8ab" Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.626909 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-1fd2-account-create-update-td6rc" Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.627384 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-6p6d5" podUID="4710c074-d5a4-4fdf-9d10-e0f0934c2e44" containerName="registry-server" containerID="cri-o://9c4f902b4f6f47edbb34a5d1709a51931f859ec90fb951677e13c4fe6d54f88a" gracePeriod=2 Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.648414 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=53.612712844 podStartE2EDuration="1m4.648394s" podCreationTimestamp="2025-12-03 09:09:29 +0000 UTC" firstStartedPulling="2025-12-03 09:09:42.088982752 +0000 UTC m=+1789.474959736" lastFinishedPulling="2025-12-03 09:09:53.124663908 +0000 UTC m=+1800.510640892" observedRunningTime="2025-12-03 09:10:33.638973881 +0000 UTC m=+1841.024950875" watchObservedRunningTime="2025-12-03 09:10:33.648394 +0000 UTC m=+1841.034370984" Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.702505 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=-9223371971.152288 podStartE2EDuration="1m5.702487972s" podCreationTimestamp="2025-12-03 09:09:28 +0000 UTC" firstStartedPulling="2025-12-03 09:09:30.763804093 +0000 UTC m=+1778.149781067" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:10:33.700456676 +0000 UTC m=+1841.086433660" watchObservedRunningTime="2025-12-03 09:10:33.702487972 +0000 UTC m=+1841.088464946" Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.735572 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-rl6dz"] Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.743697 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8554648995-rl6dz"] Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.814930 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-25phb" podUID="1f709485-8dc6-4e99-ba88-880d491fca2e" containerName="ovn-controller" probeResult="failure" output=< Dec 03 09:10:33 crc kubenswrapper[4576]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 03 09:10:33 crc kubenswrapper[4576]: > Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.863793 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-pjp95" Dec 03 09:10:33 crc kubenswrapper[4576]: I1203 09:10:33.871786 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-pjp95" Dec 03 09:10:34 crc 
kubenswrapper[4576]: I1203 09:10:34.078947 4576 scope.go:117] "RemoveContainer" containerID="ebd354bd34ac54186ae1be68e1898bc185497f25a10cb3904091463245432245" Dec 03 09:10:34 crc kubenswrapper[4576]: I1203 09:10:34.114594 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-25phb-config-pp4st"] Dec 03 09:10:34 crc kubenswrapper[4576]: E1203 09:10:34.114967 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64ba7793-75a1-4629-878c-9c39ed432ecc" containerName="init" Dec 03 09:10:34 crc kubenswrapper[4576]: I1203 09:10:34.114986 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="64ba7793-75a1-4629-878c-9c39ed432ecc" containerName="init" Dec 03 09:10:34 crc kubenswrapper[4576]: E1203 09:10:34.115004 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae79285c-f19f-4d6c-9f02-fe5b6d1de88b" containerName="mariadb-account-create-update" Dec 03 09:10:34 crc kubenswrapper[4576]: I1203 09:10:34.115011 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae79285c-f19f-4d6c-9f02-fe5b6d1de88b" containerName="mariadb-account-create-update" Dec 03 09:10:34 crc kubenswrapper[4576]: E1203 09:10:34.115025 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bd81d26-be9f-433b-8ec3-12afd1d85eeb" containerName="mariadb-database-create" Dec 03 09:10:34 crc kubenswrapper[4576]: I1203 09:10:34.115032 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bd81d26-be9f-433b-8ec3-12afd1d85eeb" containerName="mariadb-database-create" Dec 03 09:10:34 crc kubenswrapper[4576]: E1203 09:10:34.115044 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02edecfe-6126-4456-bedd-6ce9a3f68ac7" containerName="mariadb-database-create" Dec 03 09:10:34 crc kubenswrapper[4576]: I1203 09:10:34.115050 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="02edecfe-6126-4456-bedd-6ce9a3f68ac7" containerName="mariadb-database-create" Dec 03 09:10:34 crc kubenswrapper[4576]: E1203 09:10:34.115063 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9131a32-0712-419c-bf9e-3d3f6b9fa4cc" containerName="mariadb-account-create-update" Dec 03 09:10:34 crc kubenswrapper[4576]: I1203 09:10:34.115070 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9131a32-0712-419c-bf9e-3d3f6b9fa4cc" containerName="mariadb-account-create-update" Dec 03 09:10:34 crc kubenswrapper[4576]: E1203 09:10:34.115082 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64ba7793-75a1-4629-878c-9c39ed432ecc" containerName="dnsmasq-dns" Dec 03 09:10:34 crc kubenswrapper[4576]: I1203 09:10:34.115088 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="64ba7793-75a1-4629-878c-9c39ed432ecc" containerName="dnsmasq-dns" Dec 03 09:10:34 crc kubenswrapper[4576]: I1203 09:10:34.115238 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="64ba7793-75a1-4629-878c-9c39ed432ecc" containerName="dnsmasq-dns" Dec 03 09:10:34 crc kubenswrapper[4576]: I1203 09:10:34.115264 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9131a32-0712-419c-bf9e-3d3f6b9fa4cc" containerName="mariadb-account-create-update" Dec 03 09:10:34 crc kubenswrapper[4576]: I1203 09:10:34.115276 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="3bd81d26-be9f-433b-8ec3-12afd1d85eeb" containerName="mariadb-database-create" Dec 03 09:10:34 crc kubenswrapper[4576]: I1203 09:10:34.115291 4576 memory_manager.go:354] "RemoveStaleState removing state" 
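
Before admitting the new ovn-controller-25phb-config-pp4st pod, the CPU and memory managers clear stale per-container state for pods that just finished: the dnsmasq init and dnsmasq-dns containers and the mariadb create/update job containers. A sketch that lists which (podUID, containerName) pairs were cleaned up (Python; path assumed; both the cpu_manager and memory_manager wordings seen above are matched):

    import re
    import sys

    STALE = re.compile(
        r'"RemoveStaleState(?:: removing container| removing state)" '
        r'podUID="([^"]+)" containerName="([^"]+)"'
    )

    if __name__ == "__main__":
        with open(sys.argv[1], encoding="utf-8", errors="replace") as fh:
            # dict.fromkeys deduplicates while preserving first-seen order.
            seen = dict.fromkeys(STALE.findall(fh.read()))
        for pod_uid, container in seen:
            print(f"{pod_uid}  {container}")

Cross-referencing these UIDs against the earlier ContainerDied and volume-teardown entries confirms they all belong to pods that have already exited, so the cleanup is expected housekeeping rather than a sign of lost containers.
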
podUID="02edecfe-6126-4456-bedd-6ce9a3f68ac7" containerName="mariadb-database-create" Dec 03 09:10:34 crc kubenswrapper[4576]: I1203 09:10:34.115299 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae79285c-f19f-4d6c-9f02-fe5b6d1de88b" containerName="mariadb-account-create-update" Dec 03 09:10:34 crc kubenswrapper[4576]: I1203 09:10:34.116307 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-25phb-config-pp4st" Dec 03 09:10:34 crc kubenswrapper[4576]: I1203 09:10:34.119967 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 03 09:10:34 crc kubenswrapper[4576]: I1203 09:10:34.139291 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-25phb-config-pp4st"] Dec 03 09:10:34 crc kubenswrapper[4576]: I1203 09:10:34.188390 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/66288cdd-e607-48b5-b27f-3fa9071cfd05-var-run-ovn\") pod \"ovn-controller-25phb-config-pp4st\" (UID: \"66288cdd-e607-48b5-b27f-3fa9071cfd05\") " pod="openstack/ovn-controller-25phb-config-pp4st" Dec 03 09:10:34 crc kubenswrapper[4576]: I1203 09:10:34.191772 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kpdvq\" (UniqueName: \"kubernetes.io/projected/66288cdd-e607-48b5-b27f-3fa9071cfd05-kube-api-access-kpdvq\") pod \"ovn-controller-25phb-config-pp4st\" (UID: \"66288cdd-e607-48b5-b27f-3fa9071cfd05\") " pod="openstack/ovn-controller-25phb-config-pp4st" Dec 03 09:10:34 crc kubenswrapper[4576]: I1203 09:10:34.191986 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/66288cdd-e607-48b5-b27f-3fa9071cfd05-var-run\") pod \"ovn-controller-25phb-config-pp4st\" (UID: \"66288cdd-e607-48b5-b27f-3fa9071cfd05\") " pod="openstack/ovn-controller-25phb-config-pp4st" Dec 03 09:10:34 crc kubenswrapper[4576]: I1203 09:10:34.192081 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/66288cdd-e607-48b5-b27f-3fa9071cfd05-scripts\") pod \"ovn-controller-25phb-config-pp4st\" (UID: \"66288cdd-e607-48b5-b27f-3fa9071cfd05\") " pod="openstack/ovn-controller-25phb-config-pp4st" Dec 03 09:10:34 crc kubenswrapper[4576]: I1203 09:10:34.192202 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/66288cdd-e607-48b5-b27f-3fa9071cfd05-additional-scripts\") pod \"ovn-controller-25phb-config-pp4st\" (UID: \"66288cdd-e607-48b5-b27f-3fa9071cfd05\") " pod="openstack/ovn-controller-25phb-config-pp4st" Dec 03 09:10:34 crc kubenswrapper[4576]: I1203 09:10:34.192316 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/66288cdd-e607-48b5-b27f-3fa9071cfd05-var-log-ovn\") pod \"ovn-controller-25phb-config-pp4st\" (UID: \"66288cdd-e607-48b5-b27f-3fa9071cfd05\") " pod="openstack/ovn-controller-25phb-config-pp4st" Dec 03 09:10:34 crc kubenswrapper[4576]: I1203 09:10:34.294050 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: 
\"kubernetes.io/configmap/66288cdd-e607-48b5-b27f-3fa9071cfd05-additional-scripts\") pod \"ovn-controller-25phb-config-pp4st\" (UID: \"66288cdd-e607-48b5-b27f-3fa9071cfd05\") " pod="openstack/ovn-controller-25phb-config-pp4st" Dec 03 09:10:34 crc kubenswrapper[4576]: I1203 09:10:34.295202 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/66288cdd-e607-48b5-b27f-3fa9071cfd05-var-log-ovn\") pod \"ovn-controller-25phb-config-pp4st\" (UID: \"66288cdd-e607-48b5-b27f-3fa9071cfd05\") " pod="openstack/ovn-controller-25phb-config-pp4st" Dec 03 09:10:34 crc kubenswrapper[4576]: I1203 09:10:34.295348 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/66288cdd-e607-48b5-b27f-3fa9071cfd05-var-run-ovn\") pod \"ovn-controller-25phb-config-pp4st\" (UID: \"66288cdd-e607-48b5-b27f-3fa9071cfd05\") " pod="openstack/ovn-controller-25phb-config-pp4st" Dec 03 09:10:34 crc kubenswrapper[4576]: I1203 09:10:34.295442 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kpdvq\" (UniqueName: \"kubernetes.io/projected/66288cdd-e607-48b5-b27f-3fa9071cfd05-kube-api-access-kpdvq\") pod \"ovn-controller-25phb-config-pp4st\" (UID: \"66288cdd-e607-48b5-b27f-3fa9071cfd05\") " pod="openstack/ovn-controller-25phb-config-pp4st" Dec 03 09:10:34 crc kubenswrapper[4576]: I1203 09:10:34.295651 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/66288cdd-e607-48b5-b27f-3fa9071cfd05-var-run\") pod \"ovn-controller-25phb-config-pp4st\" (UID: \"66288cdd-e607-48b5-b27f-3fa9071cfd05\") " pod="openstack/ovn-controller-25phb-config-pp4st" Dec 03 09:10:34 crc kubenswrapper[4576]: I1203 09:10:34.295759 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/66288cdd-e607-48b5-b27f-3fa9071cfd05-var-run\") pod \"ovn-controller-25phb-config-pp4st\" (UID: \"66288cdd-e607-48b5-b27f-3fa9071cfd05\") " pod="openstack/ovn-controller-25phb-config-pp4st" Dec 03 09:10:34 crc kubenswrapper[4576]: I1203 09:10:34.295129 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/66288cdd-e607-48b5-b27f-3fa9071cfd05-additional-scripts\") pod \"ovn-controller-25phb-config-pp4st\" (UID: \"66288cdd-e607-48b5-b27f-3fa9071cfd05\") " pod="openstack/ovn-controller-25phb-config-pp4st" Dec 03 09:10:34 crc kubenswrapper[4576]: I1203 09:10:34.295714 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/66288cdd-e607-48b5-b27f-3fa9071cfd05-var-run-ovn\") pod \"ovn-controller-25phb-config-pp4st\" (UID: \"66288cdd-e607-48b5-b27f-3fa9071cfd05\") " pod="openstack/ovn-controller-25phb-config-pp4st" Dec 03 09:10:34 crc kubenswrapper[4576]: I1203 09:10:34.295770 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/66288cdd-e607-48b5-b27f-3fa9071cfd05-scripts\") pod \"ovn-controller-25phb-config-pp4st\" (UID: \"66288cdd-e607-48b5-b27f-3fa9071cfd05\") " pod="openstack/ovn-controller-25phb-config-pp4st" Dec 03 09:10:34 crc kubenswrapper[4576]: I1203 09:10:34.295666 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: 
\"kubernetes.io/host-path/66288cdd-e607-48b5-b27f-3fa9071cfd05-var-log-ovn\") pod \"ovn-controller-25phb-config-pp4st\" (UID: \"66288cdd-e607-48b5-b27f-3fa9071cfd05\") " pod="openstack/ovn-controller-25phb-config-pp4st" Dec 03 09:10:34 crc kubenswrapper[4576]: I1203 09:10:34.299489 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/66288cdd-e607-48b5-b27f-3fa9071cfd05-scripts\") pod \"ovn-controller-25phb-config-pp4st\" (UID: \"66288cdd-e607-48b5-b27f-3fa9071cfd05\") " pod="openstack/ovn-controller-25phb-config-pp4st" Dec 03 09:10:34 crc kubenswrapper[4576]: I1203 09:10:34.324628 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kpdvq\" (UniqueName: \"kubernetes.io/projected/66288cdd-e607-48b5-b27f-3fa9071cfd05-kube-api-access-kpdvq\") pod \"ovn-controller-25phb-config-pp4st\" (UID: \"66288cdd-e607-48b5-b27f-3fa9071cfd05\") " pod="openstack/ovn-controller-25phb-config-pp4st" Dec 03 09:10:34 crc kubenswrapper[4576]: I1203 09:10:34.523922 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-25phb-config-pp4st" Dec 03 09:10:35 crc kubenswrapper[4576]: I1203 09:10:35.020799 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-25phb-config-pp4st"] Dec 03 09:10:35 crc kubenswrapper[4576]: I1203 09:10:35.650838 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-25phb-config-pp4st" event={"ID":"66288cdd-e607-48b5-b27f-3fa9071cfd05","Type":"ContainerStarted","Data":"59291480651a4454b6879bb359d0fc747e7ced5869f8486f2fb6c4bd13d190ee"} Dec 03 09:10:35 crc kubenswrapper[4576]: I1203 09:10:35.653027 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-l5979" event={"ID":"cbed30d2-7863-4bab-9d8f-56a7b3650b47","Type":"ContainerStarted","Data":"d86147927e5661b419d74987e053f107d1833539ef35c68f7db3f0456f2fef9f"} Dec 03 09:10:35 crc kubenswrapper[4576]: I1203 09:10:35.655289 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-2f64-account-create-update-22w48" event={"ID":"30211fb8-a5e9-4d09-ab11-ca3e35b5ba92","Type":"ContainerStarted","Data":"500a050716ad1d31ca83b98d720f66ad8f993bf146e0706c19465c307b57439d"} Dec 03 09:10:35 crc kubenswrapper[4576]: I1203 09:10:35.689343 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-create-l5979" podStartSLOduration=7.689321295 podStartE2EDuration="7.689321295s" podCreationTimestamp="2025-12-03 09:10:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:10:35.681779618 +0000 UTC m=+1843.067756612" watchObservedRunningTime="2025-12-03 09:10:35.689321295 +0000 UTC m=+1843.075298289" Dec 03 09:10:35 crc kubenswrapper[4576]: I1203 09:10:35.690121 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64ba7793-75a1-4629-878c-9c39ed432ecc" path="/var/lib/kubelet/pods/64ba7793-75a1-4629-878c-9c39ed432ecc/volumes" Dec 03 09:10:35 crc kubenswrapper[4576]: I1203 09:10:35.711383 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-2f64-account-create-update-22w48" podStartSLOduration=7.711362528 podStartE2EDuration="7.711362528s" podCreationTimestamp="2025-12-03 09:10:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-12-03 09:10:35.704286484 +0000 UTC m=+1843.090263468" watchObservedRunningTime="2025-12-03 09:10:35.711362528 +0000 UTC m=+1843.097339512" Dec 03 09:10:36 crc kubenswrapper[4576]: I1203 09:10:36.665543 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-25phb-config-pp4st" event={"ID":"66288cdd-e607-48b5-b27f-3fa9071cfd05","Type":"ContainerStarted","Data":"2d1f5c2ea88e80bf4769b167f32890e7eaa3326329a0fbbdc18f45d4c7f81136"} Dec 03 09:10:36 crc kubenswrapper[4576]: I1203 09:10:36.669046 4576 generic.go:334] "Generic (PLEG): container finished" podID="4710c074-d5a4-4fdf-9d10-e0f0934c2e44" containerID="9c4f902b4f6f47edbb34a5d1709a51931f859ec90fb951677e13c4fe6d54f88a" exitCode=0 Dec 03 09:10:36 crc kubenswrapper[4576]: I1203 09:10:36.669131 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6p6d5" event={"ID":"4710c074-d5a4-4fdf-9d10-e0f0934c2e44","Type":"ContainerDied","Data":"9c4f902b4f6f47edbb34a5d1709a51931f859ec90fb951677e13c4fe6d54f88a"} Dec 03 09:10:36 crc kubenswrapper[4576]: I1203 09:10:36.671866 4576 generic.go:334] "Generic (PLEG): container finished" podID="15f066ad-3c0d-409b-9c47-e9a36cf6660d" containerID="f06d7be05a98c6de8f93da5276e18f5c38ce935018abaa854fdfc838605eb715" exitCode=0 Dec 03 09:10:36 crc kubenswrapper[4576]: I1203 09:10:36.671927 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b78nr" event={"ID":"15f066ad-3c0d-409b-9c47-e9a36cf6660d","Type":"ContainerDied","Data":"f06d7be05a98c6de8f93da5276e18f5c38ce935018abaa854fdfc838605eb715"} Dec 03 09:10:36 crc kubenswrapper[4576]: I1203 09:10:36.696415 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-25phb-config-pp4st" podStartSLOduration=2.6963899099999997 podStartE2EDuration="2.69638991s" podCreationTimestamp="2025-12-03 09:10:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:10:36.690006665 +0000 UTC m=+1844.075983669" watchObservedRunningTime="2025-12-03 09:10:36.69638991 +0000 UTC m=+1844.082366894" Dec 03 09:10:37 crc kubenswrapper[4576]: I1203 09:10:37.211703 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6p6d5" Dec 03 09:10:37 crc kubenswrapper[4576]: I1203 09:10:37.280899 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4710c074-d5a4-4fdf-9d10-e0f0934c2e44-utilities\") pod \"4710c074-d5a4-4fdf-9d10-e0f0934c2e44\" (UID: \"4710c074-d5a4-4fdf-9d10-e0f0934c2e44\") " Dec 03 09:10:37 crc kubenswrapper[4576]: I1203 09:10:37.281019 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rdq74\" (UniqueName: \"kubernetes.io/projected/4710c074-d5a4-4fdf-9d10-e0f0934c2e44-kube-api-access-rdq74\") pod \"4710c074-d5a4-4fdf-9d10-e0f0934c2e44\" (UID: \"4710c074-d5a4-4fdf-9d10-e0f0934c2e44\") " Dec 03 09:10:37 crc kubenswrapper[4576]: I1203 09:10:37.281335 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4710c074-d5a4-4fdf-9d10-e0f0934c2e44-catalog-content\") pod \"4710c074-d5a4-4fdf-9d10-e0f0934c2e44\" (UID: \"4710c074-d5a4-4fdf-9d10-e0f0934c2e44\") " Dec 03 09:10:37 crc kubenswrapper[4576]: I1203 09:10:37.286329 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4710c074-d5a4-4fdf-9d10-e0f0934c2e44-utilities" (OuterVolumeSpecName: "utilities") pod "4710c074-d5a4-4fdf-9d10-e0f0934c2e44" (UID: "4710c074-d5a4-4fdf-9d10-e0f0934c2e44"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:10:37 crc kubenswrapper[4576]: I1203 09:10:37.317876 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4710c074-d5a4-4fdf-9d10-e0f0934c2e44-kube-api-access-rdq74" (OuterVolumeSpecName: "kube-api-access-rdq74") pod "4710c074-d5a4-4fdf-9d10-e0f0934c2e44" (UID: "4710c074-d5a4-4fdf-9d10-e0f0934c2e44"). InnerVolumeSpecName "kube-api-access-rdq74". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:10:37 crc kubenswrapper[4576]: I1203 09:10:37.342774 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4710c074-d5a4-4fdf-9d10-e0f0934c2e44-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4710c074-d5a4-4fdf-9d10-e0f0934c2e44" (UID: "4710c074-d5a4-4fdf-9d10-e0f0934c2e44"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:10:37 crc kubenswrapper[4576]: I1203 09:10:37.383925 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4710c074-d5a4-4fdf-9d10-e0f0934c2e44-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:37 crc kubenswrapper[4576]: I1203 09:10:37.384211 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rdq74\" (UniqueName: \"kubernetes.io/projected/4710c074-d5a4-4fdf-9d10-e0f0934c2e44-kube-api-access-rdq74\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:37 crc kubenswrapper[4576]: I1203 09:10:37.384871 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4710c074-d5a4-4fdf-9d10-e0f0934c2e44-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:37 crc kubenswrapper[4576]: I1203 09:10:37.683057 4576 generic.go:334] "Generic (PLEG): container finished" podID="66288cdd-e607-48b5-b27f-3fa9071cfd05" containerID="2d1f5c2ea88e80bf4769b167f32890e7eaa3326329a0fbbdc18f45d4c7f81136" exitCode=0 Dec 03 09:10:37 crc kubenswrapper[4576]: I1203 09:10:37.685690 4576 generic.go:334] "Generic (PLEG): container finished" podID="801bf024-9418-42e1-893f-0a4b82d411b4" containerID="dc9c6166efda1fe8bc600a8503a3a36d653e21cf1ef6c336eac9b9fba5c0d641" exitCode=0 Dec 03 09:10:37 crc kubenswrapper[4576]: I1203 09:10:37.688844 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6p6d5" Dec 03 09:10:37 crc kubenswrapper[4576]: I1203 09:10:37.693377 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-25phb-config-pp4st" event={"ID":"66288cdd-e607-48b5-b27f-3fa9071cfd05","Type":"ContainerDied","Data":"2d1f5c2ea88e80bf4769b167f32890e7eaa3326329a0fbbdc18f45d4c7f81136"} Dec 03 09:10:37 crc kubenswrapper[4576]: I1203 09:10:37.693438 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-649tl" event={"ID":"801bf024-9418-42e1-893f-0a4b82d411b4","Type":"ContainerDied","Data":"dc9c6166efda1fe8bc600a8503a3a36d653e21cf1ef6c336eac9b9fba5c0d641"} Dec 03 09:10:37 crc kubenswrapper[4576]: I1203 09:10:37.693462 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6p6d5" event={"ID":"4710c074-d5a4-4fdf-9d10-e0f0934c2e44","Type":"ContainerDied","Data":"04fb02eeb19349e6bc1539d306d3e9a5693a74650d26253172d47fad9860c357"} Dec 03 09:10:37 crc kubenswrapper[4576]: I1203 09:10:37.693507 4576 scope.go:117] "RemoveContainer" containerID="9c4f902b4f6f47edbb34a5d1709a51931f859ec90fb951677e13c4fe6d54f88a" Dec 03 09:10:37 crc kubenswrapper[4576]: I1203 09:10:37.745276 4576 scope.go:117] "RemoveContainer" containerID="12df4d239475c86c3a581951d92d16df38830cca374d0332c6fc471013a5b2e4" Dec 03 09:10:37 crc kubenswrapper[4576]: I1203 09:10:37.834958 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6p6d5"] Dec 03 09:10:37 crc kubenswrapper[4576]: I1203 09:10:37.847114 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-6p6d5"] Dec 03 09:10:37 crc kubenswrapper[4576]: I1203 09:10:37.854120 4576 scope.go:117] "RemoveContainer" containerID="6236ce1d8bd4be8d1709db3ce9ae473e4211c8970d1f76e2119ce41cc688aee4" Dec 03 09:10:38 crc kubenswrapper[4576]: I1203 09:10:38.705607 4576 generic.go:334] "Generic (PLEG): container finished" 
podID="cbed30d2-7863-4bab-9d8f-56a7b3650b47" containerID="d86147927e5661b419d74987e053f107d1833539ef35c68f7db3f0456f2fef9f" exitCode=0 Dec 03 09:10:38 crc kubenswrapper[4576]: I1203 09:10:38.705756 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-l5979" event={"ID":"cbed30d2-7863-4bab-9d8f-56a7b3650b47","Type":"ContainerDied","Data":"d86147927e5661b419d74987e053f107d1833539ef35c68f7db3f0456f2fef9f"} Dec 03 09:10:38 crc kubenswrapper[4576]: I1203 09:10:38.815142 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-25phb" Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.236352 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-649tl" Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.243579 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-25phb-config-pp4st" Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.362118 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/66288cdd-e607-48b5-b27f-3fa9071cfd05-scripts\") pod \"66288cdd-e607-48b5-b27f-3fa9071cfd05\" (UID: \"66288cdd-e607-48b5-b27f-3fa9071cfd05\") " Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.362171 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/801bf024-9418-42e1-893f-0a4b82d411b4-etc-swift\") pod \"801bf024-9418-42e1-893f-0a4b82d411b4\" (UID: \"801bf024-9418-42e1-893f-0a4b82d411b4\") " Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.362214 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/801bf024-9418-42e1-893f-0a4b82d411b4-ring-data-devices\") pod \"801bf024-9418-42e1-893f-0a4b82d411b4\" (UID: \"801bf024-9418-42e1-893f-0a4b82d411b4\") " Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.362276 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/66288cdd-e607-48b5-b27f-3fa9071cfd05-var-run\") pod \"66288cdd-e607-48b5-b27f-3fa9071cfd05\" (UID: \"66288cdd-e607-48b5-b27f-3fa9071cfd05\") " Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.362320 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/801bf024-9418-42e1-893f-0a4b82d411b4-dispersionconf\") pod \"801bf024-9418-42e1-893f-0a4b82d411b4\" (UID: \"801bf024-9418-42e1-893f-0a4b82d411b4\") " Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.363110 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kpdvq\" (UniqueName: \"kubernetes.io/projected/66288cdd-e607-48b5-b27f-3fa9071cfd05-kube-api-access-kpdvq\") pod \"66288cdd-e607-48b5-b27f-3fa9071cfd05\" (UID: \"66288cdd-e607-48b5-b27f-3fa9071cfd05\") " Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.362348 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/66288cdd-e607-48b5-b27f-3fa9071cfd05-var-run" (OuterVolumeSpecName: "var-run") pod "66288cdd-e607-48b5-b27f-3fa9071cfd05" (UID: "66288cdd-e607-48b5-b27f-3fa9071cfd05"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.363150 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/801bf024-9418-42e1-893f-0a4b82d411b4-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "801bf024-9418-42e1-893f-0a4b82d411b4" (UID: "801bf024-9418-42e1-893f-0a4b82d411b4"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.362921 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/801bf024-9418-42e1-893f-0a4b82d411b4-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "801bf024-9418-42e1-893f-0a4b82d411b4" (UID: "801bf024-9418-42e1-893f-0a4b82d411b4"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.363169 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/801bf024-9418-42e1-893f-0a4b82d411b4-scripts\") pod \"801bf024-9418-42e1-893f-0a4b82d411b4\" (UID: \"801bf024-9418-42e1-893f-0a4b82d411b4\") " Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.363190 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/801bf024-9418-42e1-893f-0a4b82d411b4-combined-ca-bundle\") pod \"801bf024-9418-42e1-893f-0a4b82d411b4\" (UID: \"801bf024-9418-42e1-893f-0a4b82d411b4\") " Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.363231 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/66288cdd-e607-48b5-b27f-3fa9071cfd05-additional-scripts\") pod \"66288cdd-e607-48b5-b27f-3fa9071cfd05\" (UID: \"66288cdd-e607-48b5-b27f-3fa9071cfd05\") " Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.363277 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/801bf024-9418-42e1-893f-0a4b82d411b4-swiftconf\") pod \"801bf024-9418-42e1-893f-0a4b82d411b4\" (UID: \"801bf024-9418-42e1-893f-0a4b82d411b4\") " Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.363314 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/66288cdd-e607-48b5-b27f-3fa9071cfd05-var-log-ovn\") pod \"66288cdd-e607-48b5-b27f-3fa9071cfd05\" (UID: \"66288cdd-e607-48b5-b27f-3fa9071cfd05\") " Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.363334 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6zvxv\" (UniqueName: \"kubernetes.io/projected/801bf024-9418-42e1-893f-0a4b82d411b4-kube-api-access-6zvxv\") pod \"801bf024-9418-42e1-893f-0a4b82d411b4\" (UID: \"801bf024-9418-42e1-893f-0a4b82d411b4\") " Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.363359 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/66288cdd-e607-48b5-b27f-3fa9071cfd05-var-run-ovn\") pod \"66288cdd-e607-48b5-b27f-3fa9071cfd05\" (UID: \"66288cdd-e607-48b5-b27f-3fa9071cfd05\") " Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.363419 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/host-path/66288cdd-e607-48b5-b27f-3fa9071cfd05-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "66288cdd-e607-48b5-b27f-3fa9071cfd05" (UID: "66288cdd-e607-48b5-b27f-3fa9071cfd05"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.363439 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/66288cdd-e607-48b5-b27f-3fa9071cfd05-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "66288cdd-e607-48b5-b27f-3fa9071cfd05" (UID: "66288cdd-e607-48b5-b27f-3fa9071cfd05"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.363832 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66288cdd-e607-48b5-b27f-3fa9071cfd05-scripts" (OuterVolumeSpecName: "scripts") pod "66288cdd-e607-48b5-b27f-3fa9071cfd05" (UID: "66288cdd-e607-48b5-b27f-3fa9071cfd05"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.364760 4576 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/66288cdd-e607-48b5-b27f-3fa9071cfd05-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.364780 4576 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/66288cdd-e607-48b5-b27f-3fa9071cfd05-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.364790 4576 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/66288cdd-e607-48b5-b27f-3fa9071cfd05-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.364798 4576 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/801bf024-9418-42e1-893f-0a4b82d411b4-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.364807 4576 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/801bf024-9418-42e1-893f-0a4b82d411b4-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.364819 4576 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/66288cdd-e607-48b5-b27f-3fa9071cfd05-var-run\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.364972 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66288cdd-e607-48b5-b27f-3fa9071cfd05-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "66288cdd-e607-48b5-b27f-3fa9071cfd05" (UID: "66288cdd-e607-48b5-b27f-3fa9071cfd05"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.375727 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66288cdd-e607-48b5-b27f-3fa9071cfd05-kube-api-access-kpdvq" (OuterVolumeSpecName: "kube-api-access-kpdvq") pod "66288cdd-e607-48b5-b27f-3fa9071cfd05" (UID: "66288cdd-e607-48b5-b27f-3fa9071cfd05"). 
InnerVolumeSpecName "kube-api-access-kpdvq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.387002 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/801bf024-9418-42e1-893f-0a4b82d411b4-kube-api-access-6zvxv" (OuterVolumeSpecName: "kube-api-access-6zvxv") pod "801bf024-9418-42e1-893f-0a4b82d411b4" (UID: "801bf024-9418-42e1-893f-0a4b82d411b4"). InnerVolumeSpecName "kube-api-access-6zvxv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.403201 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/801bf024-9418-42e1-893f-0a4b82d411b4-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "801bf024-9418-42e1-893f-0a4b82d411b4" (UID: "801bf024-9418-42e1-893f-0a4b82d411b4"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.407709 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/801bf024-9418-42e1-893f-0a4b82d411b4-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "801bf024-9418-42e1-893f-0a4b82d411b4" (UID: "801bf024-9418-42e1-893f-0a4b82d411b4"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.420982 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/801bf024-9418-42e1-893f-0a4b82d411b4-scripts" (OuterVolumeSpecName: "scripts") pod "801bf024-9418-42e1-893f-0a4b82d411b4" (UID: "801bf024-9418-42e1-893f-0a4b82d411b4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.422907 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/801bf024-9418-42e1-893f-0a4b82d411b4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "801bf024-9418-42e1-893f-0a4b82d411b4" (UID: "801bf024-9418-42e1-893f-0a4b82d411b4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.465489 4576 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/801bf024-9418-42e1-893f-0a4b82d411b4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.465549 4576 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/66288cdd-e607-48b5-b27f-3fa9071cfd05-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.465564 4576 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/801bf024-9418-42e1-893f-0a4b82d411b4-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.465575 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6zvxv\" (UniqueName: \"kubernetes.io/projected/801bf024-9418-42e1-893f-0a4b82d411b4-kube-api-access-6zvxv\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.465590 4576 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/801bf024-9418-42e1-893f-0a4b82d411b4-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.465601 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kpdvq\" (UniqueName: \"kubernetes.io/projected/66288cdd-e607-48b5-b27f-3fa9071cfd05-kube-api-access-kpdvq\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.465612 4576 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/801bf024-9418-42e1-893f-0a4b82d411b4-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.688970 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4710c074-d5a4-4fdf-9d10-e0f0934c2e44" path="/var/lib/kubelet/pods/4710c074-d5a4-4fdf-9d10-e0f0934c2e44/volumes" Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.714621 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b78nr" event={"ID":"15f066ad-3c0d-409b-9c47-e9a36cf6660d","Type":"ContainerStarted","Data":"ea6b1022bfd89cd475509761dc5b1f998bcee39d4e203ae5fd714d5d36c5b88e"} Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.720564 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-25phb-config-pp4st" event={"ID":"66288cdd-e607-48b5-b27f-3fa9071cfd05","Type":"ContainerDied","Data":"59291480651a4454b6879bb359d0fc747e7ced5869f8486f2fb6c4bd13d190ee"} Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.720603 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="59291480651a4454b6879bb359d0fc747e7ced5869f8486f2fb6c4bd13d190ee" Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.720660 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-25phb-config-pp4st" Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.722442 4576 generic.go:334] "Generic (PLEG): container finished" podID="30211fb8-a5e9-4d09-ab11-ca3e35b5ba92" containerID="500a050716ad1d31ca83b98d720f66ad8f993bf146e0706c19465c307b57439d" exitCode=0 Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.722512 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-2f64-account-create-update-22w48" event={"ID":"30211fb8-a5e9-4d09-ab11-ca3e35b5ba92","Type":"ContainerDied","Data":"500a050716ad1d31ca83b98d720f66ad8f993bf146e0706c19465c307b57439d"} Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.726054 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-649tl" event={"ID":"801bf024-9418-42e1-893f-0a4b82d411b4","Type":"ContainerDied","Data":"a09fe7d150f4d26cee19e2f8ec7f350958846a5809bf1e6a4dbd024fb26fc318"} Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.726111 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a09fe7d150f4d26cee19e2f8ec7f350958846a5809bf1e6a4dbd024fb26fc318" Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.726127 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-649tl" Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.749186 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-b78nr" podStartSLOduration=4.186875222 podStartE2EDuration="24.749158921s" podCreationTimestamp="2025-12-03 09:10:15 +0000 UTC" firstStartedPulling="2025-12-03 09:10:18.338869514 +0000 UTC m=+1825.724846498" lastFinishedPulling="2025-12-03 09:10:38.901153213 +0000 UTC m=+1846.287130197" observedRunningTime="2025-12-03 09:10:39.742251872 +0000 UTC m=+1847.128228886" watchObservedRunningTime="2025-12-03 09:10:39.749158921 +0000 UTC m=+1847.135135905" Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.856591 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-25phb-config-pp4st"] Dec 03 09:10:39 crc kubenswrapper[4576]: I1203 09:10:39.866690 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-25phb-config-pp4st"] Dec 03 09:10:40 crc kubenswrapper[4576]: I1203 09:10:40.123055 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-l5979" Dec 03 09:10:40 crc kubenswrapper[4576]: I1203 09:10:40.175954 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cbed30d2-7863-4bab-9d8f-56a7b3650b47-operator-scripts\") pod \"cbed30d2-7863-4bab-9d8f-56a7b3650b47\" (UID: \"cbed30d2-7863-4bab-9d8f-56a7b3650b47\") " Dec 03 09:10:40 crc kubenswrapper[4576]: I1203 09:10:40.176094 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ft6gn\" (UniqueName: \"kubernetes.io/projected/cbed30d2-7863-4bab-9d8f-56a7b3650b47-kube-api-access-ft6gn\") pod \"cbed30d2-7863-4bab-9d8f-56a7b3650b47\" (UID: \"cbed30d2-7863-4bab-9d8f-56a7b3650b47\") " Dec 03 09:10:40 crc kubenswrapper[4576]: I1203 09:10:40.176707 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cbed30d2-7863-4bab-9d8f-56a7b3650b47-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "cbed30d2-7863-4bab-9d8f-56a7b3650b47" (UID: "cbed30d2-7863-4bab-9d8f-56a7b3650b47"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:10:40 crc kubenswrapper[4576]: I1203 09:10:40.188464 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cbed30d2-7863-4bab-9d8f-56a7b3650b47-kube-api-access-ft6gn" (OuterVolumeSpecName: "kube-api-access-ft6gn") pod "cbed30d2-7863-4bab-9d8f-56a7b3650b47" (UID: "cbed30d2-7863-4bab-9d8f-56a7b3650b47"). InnerVolumeSpecName "kube-api-access-ft6gn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:10:40 crc kubenswrapper[4576]: I1203 09:10:40.279285 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ft6gn\" (UniqueName: \"kubernetes.io/projected/cbed30d2-7863-4bab-9d8f-56a7b3650b47-kube-api-access-ft6gn\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:40 crc kubenswrapper[4576]: I1203 09:10:40.279327 4576 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cbed30d2-7863-4bab-9d8f-56a7b3650b47-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:40 crc kubenswrapper[4576]: I1203 09:10:40.734697 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-l5979" Dec 03 09:10:40 crc kubenswrapper[4576]: I1203 09:10:40.734790 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-l5979" event={"ID":"cbed30d2-7863-4bab-9d8f-56a7b3650b47","Type":"ContainerDied","Data":"b069ba5c4c73154d942e0416298314a52731ad22978974cb680553f4b9db2321"} Dec 03 09:10:40 crc kubenswrapper[4576]: I1203 09:10:40.735186 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b069ba5c4c73154d942e0416298314a52731ad22978974cb680553f4b9db2321" Dec 03 09:10:41 crc kubenswrapper[4576]: I1203 09:10:41.056649 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-2f64-account-create-update-22w48" Dec 03 09:10:41 crc kubenswrapper[4576]: I1203 09:10:41.194670 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rbvxf\" (UniqueName: \"kubernetes.io/projected/30211fb8-a5e9-4d09-ab11-ca3e35b5ba92-kube-api-access-rbvxf\") pod \"30211fb8-a5e9-4d09-ab11-ca3e35b5ba92\" (UID: \"30211fb8-a5e9-4d09-ab11-ca3e35b5ba92\") " Dec 03 09:10:41 crc kubenswrapper[4576]: I1203 09:10:41.194739 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/30211fb8-a5e9-4d09-ab11-ca3e35b5ba92-operator-scripts\") pod \"30211fb8-a5e9-4d09-ab11-ca3e35b5ba92\" (UID: \"30211fb8-a5e9-4d09-ab11-ca3e35b5ba92\") " Dec 03 09:10:41 crc kubenswrapper[4576]: I1203 09:10:41.195368 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/30211fb8-a5e9-4d09-ab11-ca3e35b5ba92-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "30211fb8-a5e9-4d09-ab11-ca3e35b5ba92" (UID: "30211fb8-a5e9-4d09-ab11-ca3e35b5ba92"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:10:41 crc kubenswrapper[4576]: I1203 09:10:41.196022 4576 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/30211fb8-a5e9-4d09-ab11-ca3e35b5ba92-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:41 crc kubenswrapper[4576]: I1203 09:10:41.199353 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30211fb8-a5e9-4d09-ab11-ca3e35b5ba92-kube-api-access-rbvxf" (OuterVolumeSpecName: "kube-api-access-rbvxf") pod "30211fb8-a5e9-4d09-ab11-ca3e35b5ba92" (UID: "30211fb8-a5e9-4d09-ab11-ca3e35b5ba92"). InnerVolumeSpecName "kube-api-access-rbvxf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:10:41 crc kubenswrapper[4576]: I1203 09:10:41.297758 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rbvxf\" (UniqueName: \"kubernetes.io/projected/30211fb8-a5e9-4d09-ab11-ca3e35b5ba92-kube-api-access-rbvxf\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:41 crc kubenswrapper[4576]: I1203 09:10:41.687594 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66288cdd-e607-48b5-b27f-3fa9071cfd05" path="/var/lib/kubelet/pods/66288cdd-e607-48b5-b27f-3fa9071cfd05/volumes" Dec 03 09:10:41 crc kubenswrapper[4576]: I1203 09:10:41.743329 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-2f64-account-create-update-22w48" event={"ID":"30211fb8-a5e9-4d09-ab11-ca3e35b5ba92","Type":"ContainerDied","Data":"c7fc2f50b4583d0d2e8940da4db7229463288d158235986be9159a6d6741113b"} Dec 03 09:10:41 crc kubenswrapper[4576]: I1203 09:10:41.743480 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c7fc2f50b4583d0d2e8940da4db7229463288d158235986be9159a6d6741113b" Dec 03 09:10:41 crc kubenswrapper[4576]: I1203 09:10:41.744080 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-2f64-account-create-update-22w48" Dec 03 09:10:44 crc kubenswrapper[4576]: I1203 09:10:44.134074 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-9dp49"] Dec 03 09:10:44 crc kubenswrapper[4576]: E1203 09:10:44.135557 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="801bf024-9418-42e1-893f-0a4b82d411b4" containerName="swift-ring-rebalance" Dec 03 09:10:44 crc kubenswrapper[4576]: I1203 09:10:44.135663 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="801bf024-9418-42e1-893f-0a4b82d411b4" containerName="swift-ring-rebalance" Dec 03 09:10:44 crc kubenswrapper[4576]: E1203 09:10:44.135747 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66288cdd-e607-48b5-b27f-3fa9071cfd05" containerName="ovn-config" Dec 03 09:10:44 crc kubenswrapper[4576]: I1203 09:10:44.135811 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="66288cdd-e607-48b5-b27f-3fa9071cfd05" containerName="ovn-config" Dec 03 09:10:44 crc kubenswrapper[4576]: E1203 09:10:44.135877 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbed30d2-7863-4bab-9d8f-56a7b3650b47" containerName="mariadb-database-create" Dec 03 09:10:44 crc kubenswrapper[4576]: I1203 09:10:44.135936 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbed30d2-7863-4bab-9d8f-56a7b3650b47" containerName="mariadb-database-create" Dec 03 09:10:44 crc kubenswrapper[4576]: E1203 09:10:44.136005 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4710c074-d5a4-4fdf-9d10-e0f0934c2e44" containerName="extract-utilities" Dec 03 09:10:44 crc kubenswrapper[4576]: I1203 09:10:44.136066 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="4710c074-d5a4-4fdf-9d10-e0f0934c2e44" containerName="extract-utilities" Dec 03 09:10:44 crc kubenswrapper[4576]: E1203 09:10:44.136127 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30211fb8-a5e9-4d09-ab11-ca3e35b5ba92" containerName="mariadb-account-create-update" Dec 03 09:10:44 crc kubenswrapper[4576]: I1203 09:10:44.136185 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="30211fb8-a5e9-4d09-ab11-ca3e35b5ba92" containerName="mariadb-account-create-update" Dec 03 09:10:44 crc kubenswrapper[4576]: E1203 09:10:44.136285 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4710c074-d5a4-4fdf-9d10-e0f0934c2e44" containerName="registry-server" Dec 03 09:10:44 crc kubenswrapper[4576]: I1203 09:10:44.136338 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="4710c074-d5a4-4fdf-9d10-e0f0934c2e44" containerName="registry-server" Dec 03 09:10:44 crc kubenswrapper[4576]: E1203 09:10:44.136399 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4710c074-d5a4-4fdf-9d10-e0f0934c2e44" containerName="extract-content" Dec 03 09:10:44 crc kubenswrapper[4576]: I1203 09:10:44.136457 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="4710c074-d5a4-4fdf-9d10-e0f0934c2e44" containerName="extract-content" Dec 03 09:10:44 crc kubenswrapper[4576]: I1203 09:10:44.136698 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="4710c074-d5a4-4fdf-9d10-e0f0934c2e44" containerName="registry-server" Dec 03 09:10:44 crc kubenswrapper[4576]: I1203 09:10:44.136774 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="801bf024-9418-42e1-893f-0a4b82d411b4" containerName="swift-ring-rebalance" Dec 03 09:10:44 crc kubenswrapper[4576]: I1203 09:10:44.136841 4576 
memory_manager.go:354] "RemoveStaleState removing state" podUID="30211fb8-a5e9-4d09-ab11-ca3e35b5ba92" containerName="mariadb-account-create-update" Dec 03 09:10:44 crc kubenswrapper[4576]: I1203 09:10:44.136903 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="66288cdd-e607-48b5-b27f-3fa9071cfd05" containerName="ovn-config" Dec 03 09:10:44 crc kubenswrapper[4576]: I1203 09:10:44.136963 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbed30d2-7863-4bab-9d8f-56a7b3650b47" containerName="mariadb-database-create" Dec 03 09:10:44 crc kubenswrapper[4576]: I1203 09:10:44.137553 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-9dp49" Dec 03 09:10:44 crc kubenswrapper[4576]: I1203 09:10:44.142408 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Dec 03 09:10:44 crc kubenswrapper[4576]: I1203 09:10:44.154597 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-n9x4j" Dec 03 09:10:44 crc kubenswrapper[4576]: I1203 09:10:44.154820 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-9dp49"] Dec 03 09:10:44 crc kubenswrapper[4576]: I1203 09:10:44.155697 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9fd5058c-a47f-46cd-b7a7-d6d02014da6e-config-data\") pod \"glance-db-sync-9dp49\" (UID: \"9fd5058c-a47f-46cd-b7a7-d6d02014da6e\") " pod="openstack/glance-db-sync-9dp49" Dec 03 09:10:44 crc kubenswrapper[4576]: I1203 09:10:44.155836 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/9fd5058c-a47f-46cd-b7a7-d6d02014da6e-db-sync-config-data\") pod \"glance-db-sync-9dp49\" (UID: \"9fd5058c-a47f-46cd-b7a7-d6d02014da6e\") " pod="openstack/glance-db-sync-9dp49" Dec 03 09:10:44 crc kubenswrapper[4576]: I1203 09:10:44.155941 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9fd5058c-a47f-46cd-b7a7-d6d02014da6e-combined-ca-bundle\") pod \"glance-db-sync-9dp49\" (UID: \"9fd5058c-a47f-46cd-b7a7-d6d02014da6e\") " pod="openstack/glance-db-sync-9dp49" Dec 03 09:10:44 crc kubenswrapper[4576]: I1203 09:10:44.156040 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwdf4\" (UniqueName: \"kubernetes.io/projected/9fd5058c-a47f-46cd-b7a7-d6d02014da6e-kube-api-access-zwdf4\") pod \"glance-db-sync-9dp49\" (UID: \"9fd5058c-a47f-46cd-b7a7-d6d02014da6e\") " pod="openstack/glance-db-sync-9dp49" Dec 03 09:10:44 crc kubenswrapper[4576]: I1203 09:10:44.257217 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9fd5058c-a47f-46cd-b7a7-d6d02014da6e-config-data\") pod \"glance-db-sync-9dp49\" (UID: \"9fd5058c-a47f-46cd-b7a7-d6d02014da6e\") " pod="openstack/glance-db-sync-9dp49" Dec 03 09:10:44 crc kubenswrapper[4576]: I1203 09:10:44.257321 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/9fd5058c-a47f-46cd-b7a7-d6d02014da6e-db-sync-config-data\") pod \"glance-db-sync-9dp49\" (UID: \"9fd5058c-a47f-46cd-b7a7-d6d02014da6e\") " 
pod="openstack/glance-db-sync-9dp49" Dec 03 09:10:44 crc kubenswrapper[4576]: I1203 09:10:44.257372 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9fd5058c-a47f-46cd-b7a7-d6d02014da6e-combined-ca-bundle\") pod \"glance-db-sync-9dp49\" (UID: \"9fd5058c-a47f-46cd-b7a7-d6d02014da6e\") " pod="openstack/glance-db-sync-9dp49" Dec 03 09:10:44 crc kubenswrapper[4576]: I1203 09:10:44.257419 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwdf4\" (UniqueName: \"kubernetes.io/projected/9fd5058c-a47f-46cd-b7a7-d6d02014da6e-kube-api-access-zwdf4\") pod \"glance-db-sync-9dp49\" (UID: \"9fd5058c-a47f-46cd-b7a7-d6d02014da6e\") " pod="openstack/glance-db-sync-9dp49" Dec 03 09:10:44 crc kubenswrapper[4576]: I1203 09:10:44.263678 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/9fd5058c-a47f-46cd-b7a7-d6d02014da6e-db-sync-config-data\") pod \"glance-db-sync-9dp49\" (UID: \"9fd5058c-a47f-46cd-b7a7-d6d02014da6e\") " pod="openstack/glance-db-sync-9dp49" Dec 03 09:10:44 crc kubenswrapper[4576]: I1203 09:10:44.264564 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9fd5058c-a47f-46cd-b7a7-d6d02014da6e-config-data\") pod \"glance-db-sync-9dp49\" (UID: \"9fd5058c-a47f-46cd-b7a7-d6d02014da6e\") " pod="openstack/glance-db-sync-9dp49" Dec 03 09:10:44 crc kubenswrapper[4576]: I1203 09:10:44.268206 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9fd5058c-a47f-46cd-b7a7-d6d02014da6e-combined-ca-bundle\") pod \"glance-db-sync-9dp49\" (UID: \"9fd5058c-a47f-46cd-b7a7-d6d02014da6e\") " pod="openstack/glance-db-sync-9dp49" Dec 03 09:10:44 crc kubenswrapper[4576]: I1203 09:10:44.278413 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwdf4\" (UniqueName: \"kubernetes.io/projected/9fd5058c-a47f-46cd-b7a7-d6d02014da6e-kube-api-access-zwdf4\") pod \"glance-db-sync-9dp49\" (UID: \"9fd5058c-a47f-46cd-b7a7-d6d02014da6e\") " pod="openstack/glance-db-sync-9dp49" Dec 03 09:10:44 crc kubenswrapper[4576]: I1203 09:10:44.455013 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-9dp49" Dec 03 09:10:45 crc kubenswrapper[4576]: W1203 09:10:44.886658 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9fd5058c_a47f_46cd_b7a7_d6d02014da6e.slice/crio-0bbfcd8f11eeb0de64fe101409713b8b3fcc168c812a86e790f83cbe4c2b6f73 WatchSource:0}: Error finding container 0bbfcd8f11eeb0de64fe101409713b8b3fcc168c812a86e790f83cbe4c2b6f73: Status 404 returned error can't find the container with id 0bbfcd8f11eeb0de64fe101409713b8b3fcc168c812a86e790f83cbe4c2b6f73 Dec 03 09:10:45 crc kubenswrapper[4576]: I1203 09:10:44.892947 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-9dp49"] Dec 03 09:10:45 crc kubenswrapper[4576]: I1203 09:10:45.677405 4576 scope.go:117] "RemoveContainer" containerID="971057ff9d8dd948ea2baec3fb0fe13004ef1bdcb132acd7f101f25e8ebc2c91" Dec 03 09:10:45 crc kubenswrapper[4576]: E1203 09:10:45.677744 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:10:45 crc kubenswrapper[4576]: I1203 09:10:45.790308 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-9dp49" event={"ID":"9fd5058c-a47f-46cd-b7a7-d6d02014da6e","Type":"ContainerStarted","Data":"0bbfcd8f11eeb0de64fe101409713b8b3fcc168c812a86e790f83cbe4c2b6f73"} Dec 03 09:10:46 crc kubenswrapper[4576]: I1203 09:10:46.610694 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-b78nr" Dec 03 09:10:46 crc kubenswrapper[4576]: I1203 09:10:46.610841 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-b78nr" Dec 03 09:10:46 crc kubenswrapper[4576]: I1203 09:10:46.689250 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-b78nr" Dec 03 09:10:46 crc kubenswrapper[4576]: I1203 09:10:46.859507 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-b78nr" Dec 03 09:10:47 crc kubenswrapper[4576]: I1203 09:10:47.011026 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-b78nr"] Dec 03 09:10:47 crc kubenswrapper[4576]: I1203 09:10:47.186043 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pgd6q"] Dec 03 09:10:47 crc kubenswrapper[4576]: I1203 09:10:47.186356 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-pgd6q" podUID="df2f6436-e933-49ce-8bad-aaa35e1fe3ec" containerName="registry-server" containerID="cri-o://6e8ed77a9a73f2f5fcb65cd488eeed31f6095ac23eec812f2e48face160c7aa3" gracePeriod=2 Dec 03 09:10:47 crc kubenswrapper[4576]: I1203 09:10:47.654365 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-pgd6q" Dec 03 09:10:47 crc kubenswrapper[4576]: I1203 09:10:47.728741 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sg58z\" (UniqueName: \"kubernetes.io/projected/df2f6436-e933-49ce-8bad-aaa35e1fe3ec-kube-api-access-sg58z\") pod \"df2f6436-e933-49ce-8bad-aaa35e1fe3ec\" (UID: \"df2f6436-e933-49ce-8bad-aaa35e1fe3ec\") " Dec 03 09:10:47 crc kubenswrapper[4576]: I1203 09:10:47.728785 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df2f6436-e933-49ce-8bad-aaa35e1fe3ec-utilities\") pod \"df2f6436-e933-49ce-8bad-aaa35e1fe3ec\" (UID: \"df2f6436-e933-49ce-8bad-aaa35e1fe3ec\") " Dec 03 09:10:47 crc kubenswrapper[4576]: I1203 09:10:47.728829 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df2f6436-e933-49ce-8bad-aaa35e1fe3ec-catalog-content\") pod \"df2f6436-e933-49ce-8bad-aaa35e1fe3ec\" (UID: \"df2f6436-e933-49ce-8bad-aaa35e1fe3ec\") " Dec 03 09:10:47 crc kubenswrapper[4576]: I1203 09:10:47.736172 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/df2f6436-e933-49ce-8bad-aaa35e1fe3ec-utilities" (OuterVolumeSpecName: "utilities") pod "df2f6436-e933-49ce-8bad-aaa35e1fe3ec" (UID: "df2f6436-e933-49ce-8bad-aaa35e1fe3ec"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:10:47 crc kubenswrapper[4576]: I1203 09:10:47.746811 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df2f6436-e933-49ce-8bad-aaa35e1fe3ec-kube-api-access-sg58z" (OuterVolumeSpecName: "kube-api-access-sg58z") pod "df2f6436-e933-49ce-8bad-aaa35e1fe3ec" (UID: "df2f6436-e933-49ce-8bad-aaa35e1fe3ec"). InnerVolumeSpecName "kube-api-access-sg58z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:10:47 crc kubenswrapper[4576]: I1203 09:10:47.824802 4576 generic.go:334] "Generic (PLEG): container finished" podID="df2f6436-e933-49ce-8bad-aaa35e1fe3ec" containerID="6e8ed77a9a73f2f5fcb65cd488eeed31f6095ac23eec812f2e48face160c7aa3" exitCode=0 Dec 03 09:10:47 crc kubenswrapper[4576]: I1203 09:10:47.825762 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-pgd6q" Dec 03 09:10:47 crc kubenswrapper[4576]: I1203 09:10:47.826232 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pgd6q" event={"ID":"df2f6436-e933-49ce-8bad-aaa35e1fe3ec","Type":"ContainerDied","Data":"6e8ed77a9a73f2f5fcb65cd488eeed31f6095ac23eec812f2e48face160c7aa3"} Dec 03 09:10:47 crc kubenswrapper[4576]: I1203 09:10:47.826270 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pgd6q" event={"ID":"df2f6436-e933-49ce-8bad-aaa35e1fe3ec","Type":"ContainerDied","Data":"3f03b4a94b24854614229489d110c470d5a9ca0c6f4407903013a3359ac7fbe8"} Dec 03 09:10:47 crc kubenswrapper[4576]: I1203 09:10:47.826292 4576 scope.go:117] "RemoveContainer" containerID="6e8ed77a9a73f2f5fcb65cd488eeed31f6095ac23eec812f2e48face160c7aa3" Dec 03 09:10:47 crc kubenswrapper[4576]: I1203 09:10:47.830061 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sg58z\" (UniqueName: \"kubernetes.io/projected/df2f6436-e933-49ce-8bad-aaa35e1fe3ec-kube-api-access-sg58z\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:47 crc kubenswrapper[4576]: I1203 09:10:47.830091 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df2f6436-e933-49ce-8bad-aaa35e1fe3ec-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:47 crc kubenswrapper[4576]: I1203 09:10:47.847273 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/df2f6436-e933-49ce-8bad-aaa35e1fe3ec-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "df2f6436-e933-49ce-8bad-aaa35e1fe3ec" (UID: "df2f6436-e933-49ce-8bad-aaa35e1fe3ec"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:10:47 crc kubenswrapper[4576]: I1203 09:10:47.859850 4576 scope.go:117] "RemoveContainer" containerID="37f8686239a64087a0f9176fdcc1da093e00abee9a52868a94f764ecd53d399e" Dec 03 09:10:47 crc kubenswrapper[4576]: I1203 09:10:47.887763 4576 scope.go:117] "RemoveContainer" containerID="a4ca733aec785da18a51fe6f120ff778f299d01fbe443ad98210ac9a46bdc0b4" Dec 03 09:10:47 crc kubenswrapper[4576]: I1203 09:10:47.931728 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df2f6436-e933-49ce-8bad-aaa35e1fe3ec-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 09:10:47 crc kubenswrapper[4576]: I1203 09:10:47.943054 4576 scope.go:117] "RemoveContainer" containerID="6e8ed77a9a73f2f5fcb65cd488eeed31f6095ac23eec812f2e48face160c7aa3" Dec 03 09:10:47 crc kubenswrapper[4576]: E1203 09:10:47.943981 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e8ed77a9a73f2f5fcb65cd488eeed31f6095ac23eec812f2e48face160c7aa3\": container with ID starting with 6e8ed77a9a73f2f5fcb65cd488eeed31f6095ac23eec812f2e48face160c7aa3 not found: ID does not exist" containerID="6e8ed77a9a73f2f5fcb65cd488eeed31f6095ac23eec812f2e48face160c7aa3" Dec 03 09:10:47 crc kubenswrapper[4576]: I1203 09:10:47.944010 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e8ed77a9a73f2f5fcb65cd488eeed31f6095ac23eec812f2e48face160c7aa3"} err="failed to get container status \"6e8ed77a9a73f2f5fcb65cd488eeed31f6095ac23eec812f2e48face160c7aa3\": rpc error: code = NotFound desc = could not find container \"6e8ed77a9a73f2f5fcb65cd488eeed31f6095ac23eec812f2e48face160c7aa3\": container with ID starting with 6e8ed77a9a73f2f5fcb65cd488eeed31f6095ac23eec812f2e48face160c7aa3 not found: ID does not exist" Dec 03 09:10:47 crc kubenswrapper[4576]: I1203 09:10:47.944032 4576 scope.go:117] "RemoveContainer" containerID="37f8686239a64087a0f9176fdcc1da093e00abee9a52868a94f764ecd53d399e" Dec 03 09:10:47 crc kubenswrapper[4576]: E1203 09:10:47.944449 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37f8686239a64087a0f9176fdcc1da093e00abee9a52868a94f764ecd53d399e\": container with ID starting with 37f8686239a64087a0f9176fdcc1da093e00abee9a52868a94f764ecd53d399e not found: ID does not exist" containerID="37f8686239a64087a0f9176fdcc1da093e00abee9a52868a94f764ecd53d399e" Dec 03 09:10:47 crc kubenswrapper[4576]: I1203 09:10:47.944481 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37f8686239a64087a0f9176fdcc1da093e00abee9a52868a94f764ecd53d399e"} err="failed to get container status \"37f8686239a64087a0f9176fdcc1da093e00abee9a52868a94f764ecd53d399e\": rpc error: code = NotFound desc = could not find container \"37f8686239a64087a0f9176fdcc1da093e00abee9a52868a94f764ecd53d399e\": container with ID starting with 37f8686239a64087a0f9176fdcc1da093e00abee9a52868a94f764ecd53d399e not found: ID does not exist" Dec 03 09:10:47 crc kubenswrapper[4576]: I1203 09:10:47.944499 4576 scope.go:117] "RemoveContainer" containerID="a4ca733aec785da18a51fe6f120ff778f299d01fbe443ad98210ac9a46bdc0b4" Dec 03 09:10:47 crc kubenswrapper[4576]: E1203 09:10:47.944891 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"a4ca733aec785da18a51fe6f120ff778f299d01fbe443ad98210ac9a46bdc0b4\": container with ID starting with a4ca733aec785da18a51fe6f120ff778f299d01fbe443ad98210ac9a46bdc0b4 not found: ID does not exist" containerID="a4ca733aec785da18a51fe6f120ff778f299d01fbe443ad98210ac9a46bdc0b4" Dec 03 09:10:47 crc kubenswrapper[4576]: I1203 09:10:47.944917 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a4ca733aec785da18a51fe6f120ff778f299d01fbe443ad98210ac9a46bdc0b4"} err="failed to get container status \"a4ca733aec785da18a51fe6f120ff778f299d01fbe443ad98210ac9a46bdc0b4\": rpc error: code = NotFound desc = could not find container \"a4ca733aec785da18a51fe6f120ff778f299d01fbe443ad98210ac9a46bdc0b4\": container with ID starting with a4ca733aec785da18a51fe6f120ff778f299d01fbe443ad98210ac9a46bdc0b4 not found: ID does not exist" Dec 03 09:10:48 crc kubenswrapper[4576]: I1203 09:10:48.177634 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pgd6q"] Dec 03 09:10:48 crc kubenswrapper[4576]: I1203 09:10:48.187117 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-pgd6q"] Dec 03 09:10:48 crc kubenswrapper[4576]: I1203 09:10:48.851455 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8e0694dc-a49e-4136-a206-3bb5c8acd48c-etc-swift\") pod \"swift-storage-0\" (UID: \"8e0694dc-a49e-4136-a206-3bb5c8acd48c\") " pod="openstack/swift-storage-0" Dec 03 09:10:48 crc kubenswrapper[4576]: I1203 09:10:48.857732 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8e0694dc-a49e-4136-a206-3bb5c8acd48c-etc-swift\") pod \"swift-storage-0\" (UID: \"8e0694dc-a49e-4136-a206-3bb5c8acd48c\") " pod="openstack/swift-storage-0" Dec 03 09:10:48 crc kubenswrapper[4576]: I1203 09:10:48.979837 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Dec 03 09:10:49 crc kubenswrapper[4576]: I1203 09:10:49.582127 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 03 09:10:49 crc kubenswrapper[4576]: W1203 09:10:49.591073 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8e0694dc_a49e_4136_a206_3bb5c8acd48c.slice/crio-d784d1f25c53f85e1f04e3f0dd1e3fc99e91d1076cf67489e661d1aa1b212ca0 WatchSource:0}: Error finding container d784d1f25c53f85e1f04e3f0dd1e3fc99e91d1076cf67489e661d1aa1b212ca0: Status 404 returned error can't find the container with id d784d1f25c53f85e1f04e3f0dd1e3fc99e91d1076cf67489e661d1aa1b212ca0 Dec 03 09:10:49 crc kubenswrapper[4576]: I1203 09:10:49.686838 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df2f6436-e933-49ce-8bad-aaa35e1fe3ec" path="/var/lib/kubelet/pods/df2f6436-e933-49ce-8bad-aaa35e1fe3ec/volumes" Dec 03 09:10:49 crc kubenswrapper[4576]: I1203 09:10:49.864263 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8e0694dc-a49e-4136-a206-3bb5c8acd48c","Type":"ContainerStarted","Data":"d784d1f25c53f85e1f04e3f0dd1e3fc99e91d1076cf67489e661d1aa1b212ca0"} Dec 03 09:10:50 crc kubenswrapper[4576]: I1203 09:10:50.182807 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 03 09:10:50 crc kubenswrapper[4576]: I1203 09:10:50.585733 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:10:50 crc kubenswrapper[4576]: I1203 09:10:50.947771 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-gmdx8"] Dec 03 09:10:50 crc kubenswrapper[4576]: E1203 09:10:50.948152 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df2f6436-e933-49ce-8bad-aaa35e1fe3ec" containerName="extract-content" Dec 03 09:10:50 crc kubenswrapper[4576]: I1203 09:10:50.948164 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="df2f6436-e933-49ce-8bad-aaa35e1fe3ec" containerName="extract-content" Dec 03 09:10:50 crc kubenswrapper[4576]: E1203 09:10:50.948175 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df2f6436-e933-49ce-8bad-aaa35e1fe3ec" containerName="extract-utilities" Dec 03 09:10:50 crc kubenswrapper[4576]: I1203 09:10:50.948181 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="df2f6436-e933-49ce-8bad-aaa35e1fe3ec" containerName="extract-utilities" Dec 03 09:10:50 crc kubenswrapper[4576]: E1203 09:10:50.948201 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df2f6436-e933-49ce-8bad-aaa35e1fe3ec" containerName="registry-server" Dec 03 09:10:50 crc kubenswrapper[4576]: I1203 09:10:50.948207 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="df2f6436-e933-49ce-8bad-aaa35e1fe3ec" containerName="registry-server" Dec 03 09:10:50 crc kubenswrapper[4576]: I1203 09:10:50.948356 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="df2f6436-e933-49ce-8bad-aaa35e1fe3ec" containerName="registry-server" Dec 03 09:10:50 crc kubenswrapper[4576]: I1203 09:10:50.948971 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-gmdx8" Dec 03 09:10:50 crc kubenswrapper[4576]: I1203 09:10:50.959108 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-gmdx8"] Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.033966 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-cf28-account-create-update-4g5wv"] Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.035143 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-cf28-account-create-update-4g5wv" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.039549 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.071354 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-cf28-account-create-update-4g5wv"] Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.125487 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6f7z9\" (UniqueName: \"kubernetes.io/projected/0fe42276-9012-4675-8196-28962d473856-kube-api-access-6f7z9\") pod \"cinder-db-create-gmdx8\" (UID: \"0fe42276-9012-4675-8196-28962d473856\") " pod="openstack/cinder-db-create-gmdx8" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.125554 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0fe42276-9012-4675-8196-28962d473856-operator-scripts\") pod \"cinder-db-create-gmdx8\" (UID: \"0fe42276-9012-4675-8196-28962d473856\") " pod="openstack/cinder-db-create-gmdx8" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.130695 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-8s9cm"] Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.133737 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-8s9cm" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.160722 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-8763-account-create-update-q2496"] Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.161868 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-8763-account-create-update-q2496" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.165470 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.178369 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-8s9cm"] Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.214673 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-8763-account-create-update-q2496"] Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.228048 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6f7z9\" (UniqueName: \"kubernetes.io/projected/0fe42276-9012-4675-8196-28962d473856-kube-api-access-6f7z9\") pod \"cinder-db-create-gmdx8\" (UID: \"0fe42276-9012-4675-8196-28962d473856\") " pod="openstack/cinder-db-create-gmdx8" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.228095 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0fe42276-9012-4675-8196-28962d473856-operator-scripts\") pod \"cinder-db-create-gmdx8\" (UID: \"0fe42276-9012-4675-8196-28962d473856\") " pod="openstack/cinder-db-create-gmdx8" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.228138 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89m29\" (UniqueName: \"kubernetes.io/projected/ef9dad13-73f8-4391-b2e8-3a043cff74c0-kube-api-access-89m29\") pod \"barbican-cf28-account-create-update-4g5wv\" (UID: \"ef9dad13-73f8-4391-b2e8-3a043cff74c0\") " pod="openstack/barbican-cf28-account-create-update-4g5wv" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.228207 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ef9dad13-73f8-4391-b2e8-3a043cff74c0-operator-scripts\") pod \"barbican-cf28-account-create-update-4g5wv\" (UID: \"ef9dad13-73f8-4391-b2e8-3a043cff74c0\") " pod="openstack/barbican-cf28-account-create-update-4g5wv" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.229696 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0fe42276-9012-4675-8196-28962d473856-operator-scripts\") pod \"cinder-db-create-gmdx8\" (UID: \"0fe42276-9012-4675-8196-28962d473856\") " pod="openstack/cinder-db-create-gmdx8" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.251367 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6f7z9\" (UniqueName: \"kubernetes.io/projected/0fe42276-9012-4675-8196-28962d473856-kube-api-access-6f7z9\") pod \"cinder-db-create-gmdx8\" (UID: \"0fe42276-9012-4675-8196-28962d473856\") " pod="openstack/cinder-db-create-gmdx8" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.269458 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-gmdx8" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.324709 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-vwggs"] Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.326007 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-vwggs" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.329446 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8txtp\" (UniqueName: \"kubernetes.io/projected/ce003078-de4d-4722-9e0d-3d2570c008cc-kube-api-access-8txtp\") pod \"cinder-8763-account-create-update-q2496\" (UID: \"ce003078-de4d-4722-9e0d-3d2570c008cc\") " pod="openstack/cinder-8763-account-create-update-q2496" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.329512 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fs8xd\" (UniqueName: \"kubernetes.io/projected/a8d27ef6-3306-410d-96ee-0648446c759f-kube-api-access-fs8xd\") pod \"barbican-db-create-8s9cm\" (UID: \"a8d27ef6-3306-410d-96ee-0648446c759f\") " pod="openstack/barbican-db-create-8s9cm" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.329583 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89m29\" (UniqueName: \"kubernetes.io/projected/ef9dad13-73f8-4391-b2e8-3a043cff74c0-kube-api-access-89m29\") pod \"barbican-cf28-account-create-update-4g5wv\" (UID: \"ef9dad13-73f8-4391-b2e8-3a043cff74c0\") " pod="openstack/barbican-cf28-account-create-update-4g5wv" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.329626 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ef9dad13-73f8-4391-b2e8-3a043cff74c0-operator-scripts\") pod \"barbican-cf28-account-create-update-4g5wv\" (UID: \"ef9dad13-73f8-4391-b2e8-3a043cff74c0\") " pod="openstack/barbican-cf28-account-create-update-4g5wv" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.329689 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ce003078-de4d-4722-9e0d-3d2570c008cc-operator-scripts\") pod \"cinder-8763-account-create-update-q2496\" (UID: \"ce003078-de4d-4722-9e0d-3d2570c008cc\") " pod="openstack/cinder-8763-account-create-update-q2496" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.329713 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a8d27ef6-3306-410d-96ee-0648446c759f-operator-scripts\") pod \"barbican-db-create-8s9cm\" (UID: \"a8d27ef6-3306-410d-96ee-0648446c759f\") " pod="openstack/barbican-db-create-8s9cm" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.330674 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ef9dad13-73f8-4391-b2e8-3a043cff74c0-operator-scripts\") pod \"barbican-cf28-account-create-update-4g5wv\" (UID: \"ef9dad13-73f8-4391-b2e8-3a043cff74c0\") " pod="openstack/barbican-cf28-account-create-update-4g5wv" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.340442 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-vwggs"] Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.355661 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-xpx9m"] Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.356695 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-xpx9m" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.360738 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.370306 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.370945 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.371559 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-8h9rk" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.381611 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-xpx9m"] Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.412092 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-89m29\" (UniqueName: \"kubernetes.io/projected/ef9dad13-73f8-4391-b2e8-3a043cff74c0-kube-api-access-89m29\") pod \"barbican-cf28-account-create-update-4g5wv\" (UID: \"ef9dad13-73f8-4391-b2e8-3a043cff74c0\") " pod="openstack/barbican-cf28-account-create-update-4g5wv" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.434264 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ce003078-de4d-4722-9e0d-3d2570c008cc-operator-scripts\") pod \"cinder-8763-account-create-update-q2496\" (UID: \"ce003078-de4d-4722-9e0d-3d2570c008cc\") " pod="openstack/cinder-8763-account-create-update-q2496" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.434312 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a8d27ef6-3306-410d-96ee-0648446c759f-operator-scripts\") pod \"barbican-db-create-8s9cm\" (UID: \"a8d27ef6-3306-410d-96ee-0648446c759f\") " pod="openstack/barbican-db-create-8s9cm" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.434417 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8txtp\" (UniqueName: \"kubernetes.io/projected/ce003078-de4d-4722-9e0d-3d2570c008cc-kube-api-access-8txtp\") pod \"cinder-8763-account-create-update-q2496\" (UID: \"ce003078-de4d-4722-9e0d-3d2570c008cc\") " pod="openstack/cinder-8763-account-create-update-q2496" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.434461 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fs8xd\" (UniqueName: \"kubernetes.io/projected/a8d27ef6-3306-410d-96ee-0648446c759f-kube-api-access-fs8xd\") pod \"barbican-db-create-8s9cm\" (UID: \"a8d27ef6-3306-410d-96ee-0648446c759f\") " pod="openstack/barbican-db-create-8s9cm" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.434504 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-554wv\" (UniqueName: \"kubernetes.io/projected/bc770216-f893-46e0-8e36-1c60a1094e82-kube-api-access-554wv\") pod \"neutron-db-create-vwggs\" (UID: \"bc770216-f893-46e0-8e36-1c60a1094e82\") " pod="openstack/neutron-db-create-vwggs" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.434569 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/bc770216-f893-46e0-8e36-1c60a1094e82-operator-scripts\") pod \"neutron-db-create-vwggs\" (UID: \"bc770216-f893-46e0-8e36-1c60a1094e82\") " pod="openstack/neutron-db-create-vwggs" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.435307 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ce003078-de4d-4722-9e0d-3d2570c008cc-operator-scripts\") pod \"cinder-8763-account-create-update-q2496\" (UID: \"ce003078-de4d-4722-9e0d-3d2570c008cc\") " pod="openstack/cinder-8763-account-create-update-q2496" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.435401 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a8d27ef6-3306-410d-96ee-0648446c759f-operator-scripts\") pod \"barbican-db-create-8s9cm\" (UID: \"a8d27ef6-3306-410d-96ee-0648446c759f\") " pod="openstack/barbican-db-create-8s9cm" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.467165 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fs8xd\" (UniqueName: \"kubernetes.io/projected/a8d27ef6-3306-410d-96ee-0648446c759f-kube-api-access-fs8xd\") pod \"barbican-db-create-8s9cm\" (UID: \"a8d27ef6-3306-410d-96ee-0648446c759f\") " pod="openstack/barbican-db-create-8s9cm" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.475629 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-917b-account-create-update-5fjkf"] Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.479570 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8txtp\" (UniqueName: \"kubernetes.io/projected/ce003078-de4d-4722-9e0d-3d2570c008cc-kube-api-access-8txtp\") pod \"cinder-8763-account-create-update-q2496\" (UID: \"ce003078-de4d-4722-9e0d-3d2570c008cc\") " pod="openstack/cinder-8763-account-create-update-q2496" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.483631 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-917b-account-create-update-5fjkf" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.490093 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.503630 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-8763-account-create-update-q2496" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.536120 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-554wv\" (UniqueName: \"kubernetes.io/projected/bc770216-f893-46e0-8e36-1c60a1094e82-kube-api-access-554wv\") pod \"neutron-db-create-vwggs\" (UID: \"bc770216-f893-46e0-8e36-1c60a1094e82\") " pod="openstack/neutron-db-create-vwggs" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.536205 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9361513a-8494-48cb-9d68-43a57f2d679f-combined-ca-bundle\") pod \"keystone-db-sync-xpx9m\" (UID: \"9361513a-8494-48cb-9d68-43a57f2d679f\") " pod="openstack/keystone-db-sync-xpx9m" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.536318 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bc770216-f893-46e0-8e36-1c60a1094e82-operator-scripts\") pod \"neutron-db-create-vwggs\" (UID: \"bc770216-f893-46e0-8e36-1c60a1094e82\") " pod="openstack/neutron-db-create-vwggs" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.537066 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bc770216-f893-46e0-8e36-1c60a1094e82-operator-scripts\") pod \"neutron-db-create-vwggs\" (UID: \"bc770216-f893-46e0-8e36-1c60a1094e82\") " pod="openstack/neutron-db-create-vwggs" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.537105 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-917b-account-create-update-5fjkf"] Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.537176 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ldj7s\" (UniqueName: \"kubernetes.io/projected/9361513a-8494-48cb-9d68-43a57f2d679f-kube-api-access-ldj7s\") pod \"keystone-db-sync-xpx9m\" (UID: \"9361513a-8494-48cb-9d68-43a57f2d679f\") " pod="openstack/keystone-db-sync-xpx9m" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.537275 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9361513a-8494-48cb-9d68-43a57f2d679f-config-data\") pod \"keystone-db-sync-xpx9m\" (UID: \"9361513a-8494-48cb-9d68-43a57f2d679f\") " pod="openstack/keystone-db-sync-xpx9m" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.557014 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-554wv\" (UniqueName: \"kubernetes.io/projected/bc770216-f893-46e0-8e36-1c60a1094e82-kube-api-access-554wv\") pod \"neutron-db-create-vwggs\" (UID: \"bc770216-f893-46e0-8e36-1c60a1094e82\") " pod="openstack/neutron-db-create-vwggs" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.644586 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9361513a-8494-48cb-9d68-43a57f2d679f-combined-ca-bundle\") pod \"keystone-db-sync-xpx9m\" (UID: \"9361513a-8494-48cb-9d68-43a57f2d679f\") " pod="openstack/keystone-db-sync-xpx9m" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.644677 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/54df14dc-89cd-4940-aed5-0ba757f5294c-operator-scripts\") pod \"neutron-917b-account-create-update-5fjkf\" (UID: \"54df14dc-89cd-4940-aed5-0ba757f5294c\") " pod="openstack/neutron-917b-account-create-update-5fjkf" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.644761 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ldj7s\" (UniqueName: \"kubernetes.io/projected/9361513a-8494-48cb-9d68-43a57f2d679f-kube-api-access-ldj7s\") pod \"keystone-db-sync-xpx9m\" (UID: \"9361513a-8494-48cb-9d68-43a57f2d679f\") " pod="openstack/keystone-db-sync-xpx9m" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.644780 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9361513a-8494-48cb-9d68-43a57f2d679f-config-data\") pod \"keystone-db-sync-xpx9m\" (UID: \"9361513a-8494-48cb-9d68-43a57f2d679f\") " pod="openstack/keystone-db-sync-xpx9m" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.644809 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmp4p\" (UniqueName: \"kubernetes.io/projected/54df14dc-89cd-4940-aed5-0ba757f5294c-kube-api-access-nmp4p\") pod \"neutron-917b-account-create-update-5fjkf\" (UID: \"54df14dc-89cd-4940-aed5-0ba757f5294c\") " pod="openstack/neutron-917b-account-create-update-5fjkf" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.648615 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9361513a-8494-48cb-9d68-43a57f2d679f-combined-ca-bundle\") pod \"keystone-db-sync-xpx9m\" (UID: \"9361513a-8494-48cb-9d68-43a57f2d679f\") " pod="openstack/keystone-db-sync-xpx9m" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.649555 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9361513a-8494-48cb-9d68-43a57f2d679f-config-data\") pod \"keystone-db-sync-xpx9m\" (UID: \"9361513a-8494-48cb-9d68-43a57f2d679f\") " pod="openstack/keystone-db-sync-xpx9m" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.663950 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-cf28-account-create-update-4g5wv" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.674866 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ldj7s\" (UniqueName: \"kubernetes.io/projected/9361513a-8494-48cb-9d68-43a57f2d679f-kube-api-access-ldj7s\") pod \"keystone-db-sync-xpx9m\" (UID: \"9361513a-8494-48cb-9d68-43a57f2d679f\") " pod="openstack/keystone-db-sync-xpx9m" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.746312 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmp4p\" (UniqueName: \"kubernetes.io/projected/54df14dc-89cd-4940-aed5-0ba757f5294c-kube-api-access-nmp4p\") pod \"neutron-917b-account-create-update-5fjkf\" (UID: \"54df14dc-89cd-4940-aed5-0ba757f5294c\") " pod="openstack/neutron-917b-account-create-update-5fjkf" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.746451 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/54df14dc-89cd-4940-aed5-0ba757f5294c-operator-scripts\") pod \"neutron-917b-account-create-update-5fjkf\" (UID: \"54df14dc-89cd-4940-aed5-0ba757f5294c\") " pod="openstack/neutron-917b-account-create-update-5fjkf" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.746666 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-vwggs" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.748106 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/54df14dc-89cd-4940-aed5-0ba757f5294c-operator-scripts\") pod \"neutron-917b-account-create-update-5fjkf\" (UID: \"54df14dc-89cd-4940-aed5-0ba757f5294c\") " pod="openstack/neutron-917b-account-create-update-5fjkf" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.753900 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-8s9cm" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.768825 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmp4p\" (UniqueName: \"kubernetes.io/projected/54df14dc-89cd-4940-aed5-0ba757f5294c-kube-api-access-nmp4p\") pod \"neutron-917b-account-create-update-5fjkf\" (UID: \"54df14dc-89cd-4940-aed5-0ba757f5294c\") " pod="openstack/neutron-917b-account-create-update-5fjkf" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.821368 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-xpx9m" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.839350 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-917b-account-create-update-5fjkf" Dec 03 09:10:51 crc kubenswrapper[4576]: I1203 09:10:51.958815 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-gmdx8"] Dec 03 09:10:52 crc kubenswrapper[4576]: W1203 09:10:52.003985 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0fe42276_9012_4675_8196_28962d473856.slice/crio-7c9ee0b146906fa027e486d84a9f87a5ac2c1993494a8abbc03affc104f7e9d3 WatchSource:0}: Error finding container 7c9ee0b146906fa027e486d84a9f87a5ac2c1993494a8abbc03affc104f7e9d3: Status 404 returned error can't find the container with id 7c9ee0b146906fa027e486d84a9f87a5ac2c1993494a8abbc03affc104f7e9d3 Dec 03 09:10:52 crc kubenswrapper[4576]: I1203 09:10:52.184592 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-8763-account-create-update-q2496"] Dec 03 09:10:52 crc kubenswrapper[4576]: I1203 09:10:52.343443 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-cf28-account-create-update-4g5wv"] Dec 03 09:10:52 crc kubenswrapper[4576]: I1203 09:10:52.409992 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-vwggs"] Dec 03 09:10:52 crc kubenswrapper[4576]: I1203 09:10:52.480201 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-917b-account-create-update-5fjkf"] Dec 03 09:10:52 crc kubenswrapper[4576]: I1203 09:10:52.746477 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-8s9cm"] Dec 03 09:10:52 crc kubenswrapper[4576]: I1203 09:10:52.776065 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-xpx9m"] Dec 03 09:10:52 crc kubenswrapper[4576]: I1203 09:10:52.898845 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-gmdx8" event={"ID":"0fe42276-9012-4675-8196-28962d473856","Type":"ContainerStarted","Data":"f1caa22025532f0bd1b4f5cb091802b821fe7db9c3223c8c3761b48579ed77ba"} Dec 03 09:10:52 crc kubenswrapper[4576]: I1203 09:10:52.898884 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-gmdx8" event={"ID":"0fe42276-9012-4675-8196-28962d473856","Type":"ContainerStarted","Data":"7c9ee0b146906fa027e486d84a9f87a5ac2c1993494a8abbc03affc104f7e9d3"} Dec 03 09:10:52 crc kubenswrapper[4576]: I1203 09:10:52.916706 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-create-gmdx8" podStartSLOduration=2.916682605 podStartE2EDuration="2.916682605s" podCreationTimestamp="2025-12-03 09:10:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:10:52.909283082 +0000 UTC m=+1860.295260066" watchObservedRunningTime="2025-12-03 09:10:52.916682605 +0000 UTC m=+1860.302659589" Dec 03 09:10:53 crc kubenswrapper[4576]: W1203 09:10:53.168392 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podce003078_de4d_4722_9e0d_3d2570c008cc.slice/crio-9b6ae13813938e48802c787162be6c9cb9f2432d0a93fc83dc69de2abf7d4fa6 WatchSource:0}: Error finding container 9b6ae13813938e48802c787162be6c9cb9f2432d0a93fc83dc69de2abf7d4fa6: Status 404 returned error can't find the container with id 9b6ae13813938e48802c787162be6c9cb9f2432d0a93fc83dc69de2abf7d4fa6 Dec 03 09:10:53 crc kubenswrapper[4576]: 
W1203 09:10:53.181048 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podef9dad13_73f8_4391_b2e8_3a043cff74c0.slice/crio-3d7b93507d397117f025f350f925aeb41532cac422a98f10e42ba953ad5cf471 WatchSource:0}: Error finding container 3d7b93507d397117f025f350f925aeb41532cac422a98f10e42ba953ad5cf471: Status 404 returned error can't find the container with id 3d7b93507d397117f025f350f925aeb41532cac422a98f10e42ba953ad5cf471 Dec 03 09:10:53 crc kubenswrapper[4576]: W1203 09:10:53.188381 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda8d27ef6_3306_410d_96ee_0648446c759f.slice/crio-4497dc96b692cd990a07ae951ccdc33780be2d839c16cd30a031ca2b7f219df0 WatchSource:0}: Error finding container 4497dc96b692cd990a07ae951ccdc33780be2d839c16cd30a031ca2b7f219df0: Status 404 returned error can't find the container with id 4497dc96b692cd990a07ae951ccdc33780be2d839c16cd30a031ca2b7f219df0 Dec 03 09:10:53 crc kubenswrapper[4576]: I1203 09:10:53.914814 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-vwggs" event={"ID":"bc770216-f893-46e0-8e36-1c60a1094e82","Type":"ContainerStarted","Data":"dcb30e7fa2b0010cf91db3a2e9f1119023fe58a82064663f2722e43ae8cfe0f1"} Dec 03 09:10:53 crc kubenswrapper[4576]: I1203 09:10:53.916852 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-xpx9m" event={"ID":"9361513a-8494-48cb-9d68-43a57f2d679f","Type":"ContainerStarted","Data":"9f399ec9e6360a0ddcd7a15653e261c6537bcd87a7affa843613bfa3cefcf3c0"} Dec 03 09:10:53 crc kubenswrapper[4576]: I1203 09:10:53.927943 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-8s9cm" event={"ID":"a8d27ef6-3306-410d-96ee-0648446c759f","Type":"ContainerStarted","Data":"4497dc96b692cd990a07ae951ccdc33780be2d839c16cd30a031ca2b7f219df0"} Dec 03 09:10:53 crc kubenswrapper[4576]: I1203 09:10:53.937353 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-8763-account-create-update-q2496" event={"ID":"ce003078-de4d-4722-9e0d-3d2570c008cc","Type":"ContainerStarted","Data":"657cd9f7ffccc31161afffbfea5a81a1f112d5becb3d275cc47c207def3aaa2d"} Dec 03 09:10:53 crc kubenswrapper[4576]: I1203 09:10:53.937410 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-8763-account-create-update-q2496" event={"ID":"ce003078-de4d-4722-9e0d-3d2570c008cc","Type":"ContainerStarted","Data":"9b6ae13813938e48802c787162be6c9cb9f2432d0a93fc83dc69de2abf7d4fa6"} Dec 03 09:10:53 crc kubenswrapper[4576]: I1203 09:10:53.939623 4576 generic.go:334] "Generic (PLEG): container finished" podID="0fe42276-9012-4675-8196-28962d473856" containerID="f1caa22025532f0bd1b4f5cb091802b821fe7db9c3223c8c3761b48579ed77ba" exitCode=0 Dec 03 09:10:53 crc kubenswrapper[4576]: I1203 09:10:53.939707 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-gmdx8" event={"ID":"0fe42276-9012-4675-8196-28962d473856","Type":"ContainerDied","Data":"f1caa22025532f0bd1b4f5cb091802b821fe7db9c3223c8c3761b48579ed77ba"} Dec 03 09:10:53 crc kubenswrapper[4576]: I1203 09:10:53.941022 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-917b-account-create-update-5fjkf" event={"ID":"54df14dc-89cd-4940-aed5-0ba757f5294c","Type":"ContainerStarted","Data":"c1fcc174830805d2c16b57daafc558874309d2093f423951c5818cf32188677b"} Dec 03 09:10:53 crc kubenswrapper[4576]: 
I1203 09:10:53.944854 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-cf28-account-create-update-4g5wv" event={"ID":"ef9dad13-73f8-4391-b2e8-3a043cff74c0","Type":"ContainerStarted","Data":"3d7b93507d397117f025f350f925aeb41532cac422a98f10e42ba953ad5cf471"} Dec 03 09:10:54 crc kubenswrapper[4576]: I1203 09:10:54.963160 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-917b-account-create-update-5fjkf" event={"ID":"54df14dc-89cd-4940-aed5-0ba757f5294c","Type":"ContainerStarted","Data":"081af4196e1a1809140be15b136e39ea2e28ed36c4b6794ba5befae71b96d392"} Dec 03 09:10:54 crc kubenswrapper[4576]: I1203 09:10:54.968058 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-cf28-account-create-update-4g5wv" event={"ID":"ef9dad13-73f8-4391-b2e8-3a043cff74c0","Type":"ContainerStarted","Data":"ffb52e41f74d7e9b5eaaef727bc32875c9aff05b43fe48166609403a91341fa8"} Dec 03 09:10:54 crc kubenswrapper[4576]: I1203 09:10:54.969681 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-vwggs" event={"ID":"bc770216-f893-46e0-8e36-1c60a1094e82","Type":"ContainerStarted","Data":"dc07261f74649d362d3351fac984da289f993337fc38e99219bccd5eedea564e"} Dec 03 09:10:54 crc kubenswrapper[4576]: I1203 09:10:54.971535 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-8s9cm" event={"ID":"a8d27ef6-3306-410d-96ee-0648446c759f","Type":"ContainerStarted","Data":"1f212dd4d2bee4f19bfb6b7ff536eb6d4141d5b26ba895a23414ff6104c6f4ef"} Dec 03 09:10:54 crc kubenswrapper[4576]: I1203 09:10:54.974159 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8e0694dc-a49e-4136-a206-3bb5c8acd48c","Type":"ContainerStarted","Data":"100ea1d6161cabad6303193a7191fa015cfc0542b76300dfc993bf3cacb47421"} Dec 03 09:10:54 crc kubenswrapper[4576]: I1203 09:10:54.986339 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-917b-account-create-update-5fjkf" podStartSLOduration=3.986318067 podStartE2EDuration="3.986318067s" podCreationTimestamp="2025-12-03 09:10:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:10:54.974878014 +0000 UTC m=+1862.360854998" watchObservedRunningTime="2025-12-03 09:10:54.986318067 +0000 UTC m=+1862.372295051" Dec 03 09:10:55 crc kubenswrapper[4576]: I1203 09:10:54.999356 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-cf28-account-create-update-4g5wv" podStartSLOduration=3.9993360730000003 podStartE2EDuration="3.999336073s" podCreationTimestamp="2025-12-03 09:10:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:10:54.990182053 +0000 UTC m=+1862.376159037" watchObservedRunningTime="2025-12-03 09:10:54.999336073 +0000 UTC m=+1862.385313057" Dec 03 09:10:55 crc kubenswrapper[4576]: I1203 09:10:55.011935 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-create-vwggs" podStartSLOduration=4.011916328 podStartE2EDuration="4.011916328s" podCreationTimestamp="2025-12-03 09:10:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:10:55.001663097 +0000 UTC m=+1862.387640081" watchObservedRunningTime="2025-12-03 
09:10:55.011916328 +0000 UTC m=+1862.397893312" Dec 03 09:10:55 crc kubenswrapper[4576]: I1203 09:10:55.029915 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-8763-account-create-update-q2496" podStartSLOduration=4.02989588 podStartE2EDuration="4.02989588s" podCreationTimestamp="2025-12-03 09:10:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:10:55.022365344 +0000 UTC m=+1862.408342318" watchObservedRunningTime="2025-12-03 09:10:55.02989588 +0000 UTC m=+1862.415872864" Dec 03 09:10:55 crc kubenswrapper[4576]: I1203 09:10:55.047489 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-create-8s9cm" podStartSLOduration=4.047467331 podStartE2EDuration="4.047467331s" podCreationTimestamp="2025-12-03 09:10:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:10:55.035831053 +0000 UTC m=+1862.421808057" watchObservedRunningTime="2025-12-03 09:10:55.047467331 +0000 UTC m=+1862.433444315" Dec 03 09:10:55 crc kubenswrapper[4576]: I1203 09:10:55.994007 4576 generic.go:334] "Generic (PLEG): container finished" podID="a8d27ef6-3306-410d-96ee-0648446c759f" containerID="1f212dd4d2bee4f19bfb6b7ff536eb6d4141d5b26ba895a23414ff6104c6f4ef" exitCode=0 Dec 03 09:10:55 crc kubenswrapper[4576]: I1203 09:10:55.994144 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-8s9cm" event={"ID":"a8d27ef6-3306-410d-96ee-0648446c759f","Type":"ContainerDied","Data":"1f212dd4d2bee4f19bfb6b7ff536eb6d4141d5b26ba895a23414ff6104c6f4ef"} Dec 03 09:10:57 crc kubenswrapper[4576]: I1203 09:10:57.003999 4576 generic.go:334] "Generic (PLEG): container finished" podID="ce003078-de4d-4722-9e0d-3d2570c008cc" containerID="657cd9f7ffccc31161afffbfea5a81a1f112d5becb3d275cc47c207def3aaa2d" exitCode=0 Dec 03 09:10:57 crc kubenswrapper[4576]: I1203 09:10:57.004105 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-8763-account-create-update-q2496" event={"ID":"ce003078-de4d-4722-9e0d-3d2570c008cc","Type":"ContainerDied","Data":"657cd9f7ffccc31161afffbfea5a81a1f112d5becb3d275cc47c207def3aaa2d"} Dec 03 09:10:57 crc kubenswrapper[4576]: I1203 09:10:57.007345 4576 generic.go:334] "Generic (PLEG): container finished" podID="54df14dc-89cd-4940-aed5-0ba757f5294c" containerID="081af4196e1a1809140be15b136e39ea2e28ed36c4b6794ba5befae71b96d392" exitCode=0 Dec 03 09:10:57 crc kubenswrapper[4576]: I1203 09:10:57.007405 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-917b-account-create-update-5fjkf" event={"ID":"54df14dc-89cd-4940-aed5-0ba757f5294c","Type":"ContainerDied","Data":"081af4196e1a1809140be15b136e39ea2e28ed36c4b6794ba5befae71b96d392"} Dec 03 09:10:57 crc kubenswrapper[4576]: I1203 09:10:57.009976 4576 generic.go:334] "Generic (PLEG): container finished" podID="ef9dad13-73f8-4391-b2e8-3a043cff74c0" containerID="ffb52e41f74d7e9b5eaaef727bc32875c9aff05b43fe48166609403a91341fa8" exitCode=0 Dec 03 09:10:57 crc kubenswrapper[4576]: I1203 09:10:57.010040 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-cf28-account-create-update-4g5wv" event={"ID":"ef9dad13-73f8-4391-b2e8-3a043cff74c0","Type":"ContainerDied","Data":"ffb52e41f74d7e9b5eaaef727bc32875c9aff05b43fe48166609403a91341fa8"} Dec 03 09:10:57 crc kubenswrapper[4576]: I1203 
09:10:57.011750 4576 generic.go:334] "Generic (PLEG): container finished" podID="bc770216-f893-46e0-8e36-1c60a1094e82" containerID="dc07261f74649d362d3351fac984da289f993337fc38e99219bccd5eedea564e" exitCode=0 Dec 03 09:10:57 crc kubenswrapper[4576]: I1203 09:10:57.011905 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-vwggs" event={"ID":"bc770216-f893-46e0-8e36-1c60a1094e82","Type":"ContainerDied","Data":"dc07261f74649d362d3351fac984da289f993337fc38e99219bccd5eedea564e"} Dec 03 09:10:59 crc kubenswrapper[4576]: I1203 09:10:59.678342 4576 scope.go:117] "RemoveContainer" containerID="971057ff9d8dd948ea2baec3fb0fe13004ef1bdcb132acd7f101f25e8ebc2c91" Dec 03 09:10:59 crc kubenswrapper[4576]: E1203 09:10:59.679305 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:11:05 crc kubenswrapper[4576]: E1203 09:11:05.200169 4576 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-glance-api:current-podified" Dec 03 09:11:05 crc kubenswrapper[4576]: E1203 09:11:05.200763 4576 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:glance-db-sync,Image:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/glance/glance.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zwdf4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42415,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42415,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePo
licy{},RestartPolicy:nil,} start failed in pod glance-db-sync-9dp49_openstack(9fd5058c-a47f-46cd-b7a7-d6d02014da6e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 09:11:05 crc kubenswrapper[4576]: E1203 09:11:05.202187 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/glance-db-sync-9dp49" podUID="9fd5058c-a47f-46cd-b7a7-d6d02014da6e" Dec 03 09:11:05 crc kubenswrapper[4576]: I1203 09:11:05.240887 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-8763-account-create-update-q2496" Dec 03 09:11:05 crc kubenswrapper[4576]: I1203 09:11:05.329487 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ce003078-de4d-4722-9e0d-3d2570c008cc-operator-scripts\") pod \"ce003078-de4d-4722-9e0d-3d2570c008cc\" (UID: \"ce003078-de4d-4722-9e0d-3d2570c008cc\") " Dec 03 09:11:05 crc kubenswrapper[4576]: I1203 09:11:05.329552 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8txtp\" (UniqueName: \"kubernetes.io/projected/ce003078-de4d-4722-9e0d-3d2570c008cc-kube-api-access-8txtp\") pod \"ce003078-de4d-4722-9e0d-3d2570c008cc\" (UID: \"ce003078-de4d-4722-9e0d-3d2570c008cc\") " Dec 03 09:11:05 crc kubenswrapper[4576]: I1203 09:11:05.332420 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ce003078-de4d-4722-9e0d-3d2570c008cc-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ce003078-de4d-4722-9e0d-3d2570c008cc" (UID: "ce003078-de4d-4722-9e0d-3d2570c008cc"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:11:05 crc kubenswrapper[4576]: I1203 09:11:05.337565 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce003078-de4d-4722-9e0d-3d2570c008cc-kube-api-access-8txtp" (OuterVolumeSpecName: "kube-api-access-8txtp") pod "ce003078-de4d-4722-9e0d-3d2570c008cc" (UID: "ce003078-de4d-4722-9e0d-3d2570c008cc"). InnerVolumeSpecName "kube-api-access-8txtp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:11:05 crc kubenswrapper[4576]: I1203 09:11:05.431800 4576 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ce003078-de4d-4722-9e0d-3d2570c008cc-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:11:05 crc kubenswrapper[4576]: I1203 09:11:05.431846 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8txtp\" (UniqueName: \"kubernetes.io/projected/ce003078-de4d-4722-9e0d-3d2570c008cc-kube-api-access-8txtp\") on node \"crc\" DevicePath \"\"" Dec 03 09:11:06 crc kubenswrapper[4576]: I1203 09:11:06.089176 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-8763-account-create-update-q2496" event={"ID":"ce003078-de4d-4722-9e0d-3d2570c008cc","Type":"ContainerDied","Data":"9b6ae13813938e48802c787162be6c9cb9f2432d0a93fc83dc69de2abf7d4fa6"} Dec 03 09:11:06 crc kubenswrapper[4576]: I1203 09:11:06.089495 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9b6ae13813938e48802c787162be6c9cb9f2432d0a93fc83dc69de2abf7d4fa6" Dec 03 09:11:06 crc kubenswrapper[4576]: I1203 09:11:06.089217 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-8763-account-create-update-q2496" Dec 03 09:11:06 crc kubenswrapper[4576]: E1203 09:11:06.091405 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-glance-api:current-podified\\\"\"" pod="openstack/glance-db-sync-9dp49" podUID="9fd5058c-a47f-46cd-b7a7-d6d02014da6e" Dec 03 09:11:08 crc kubenswrapper[4576]: E1203 09:11:08.181107 4576 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-keystone:current-podified" Dec 03 09:11:08 crc kubenswrapper[4576]: E1203 09:11:08.183280 4576 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:keystone-db-sync,Image:quay.io/podified-antelope-centos9/openstack-keystone:current-podified,Command:[/bin/bash],Args:[-c keystone-manage 
db_sync],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/keystone/keystone.conf,SubPath:keystone.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ldj7s,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42425,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42425,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-db-sync-xpx9m_openstack(9361513a-8494-48cb-9d68-43a57f2d679f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 09:11:08 crc kubenswrapper[4576]: E1203 09:11:08.185789 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"keystone-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/keystone-db-sync-xpx9m" podUID="9361513a-8494-48cb-9d68-43a57f2d679f" Dec 03 09:11:08 crc kubenswrapper[4576]: I1203 09:11:08.327432 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-cf28-account-create-update-4g5wv" Dec 03 09:11:08 crc kubenswrapper[4576]: I1203 09:11:08.334454 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-vwggs" Dec 03 09:11:08 crc kubenswrapper[4576]: I1203 09:11:08.378673 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-917b-account-create-update-5fjkf" Dec 03 09:11:08 crc kubenswrapper[4576]: I1203 09:11:08.385861 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-gmdx8" Dec 03 09:11:08 crc kubenswrapper[4576]: I1203 09:11:08.390599 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-8s9cm" Dec 03 09:11:08 crc kubenswrapper[4576]: I1203 09:11:08.483600 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ef9dad13-73f8-4391-b2e8-3a043cff74c0-operator-scripts\") pod \"ef9dad13-73f8-4391-b2e8-3a043cff74c0\" (UID: \"ef9dad13-73f8-4391-b2e8-3a043cff74c0\") " Dec 03 09:11:08 crc kubenswrapper[4576]: I1203 09:11:08.483696 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/54df14dc-89cd-4940-aed5-0ba757f5294c-operator-scripts\") pod \"54df14dc-89cd-4940-aed5-0ba757f5294c\" (UID: \"54df14dc-89cd-4940-aed5-0ba757f5294c\") " Dec 03 09:11:08 crc kubenswrapper[4576]: I1203 09:11:08.483745 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-89m29\" (UniqueName: \"kubernetes.io/projected/ef9dad13-73f8-4391-b2e8-3a043cff74c0-kube-api-access-89m29\") pod \"ef9dad13-73f8-4391-b2e8-3a043cff74c0\" (UID: \"ef9dad13-73f8-4391-b2e8-3a043cff74c0\") " Dec 03 09:11:08 crc kubenswrapper[4576]: I1203 09:11:08.483916 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a8d27ef6-3306-410d-96ee-0648446c759f-operator-scripts\") pod \"a8d27ef6-3306-410d-96ee-0648446c759f\" (UID: \"a8d27ef6-3306-410d-96ee-0648446c759f\") " Dec 03 09:11:08 crc kubenswrapper[4576]: I1203 09:11:08.483955 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6f7z9\" (UniqueName: \"kubernetes.io/projected/0fe42276-9012-4675-8196-28962d473856-kube-api-access-6f7z9\") pod \"0fe42276-9012-4675-8196-28962d473856\" (UID: \"0fe42276-9012-4675-8196-28962d473856\") " Dec 03 09:11:08 crc kubenswrapper[4576]: I1203 09:11:08.483986 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fs8xd\" (UniqueName: \"kubernetes.io/projected/a8d27ef6-3306-410d-96ee-0648446c759f-kube-api-access-fs8xd\") pod \"a8d27ef6-3306-410d-96ee-0648446c759f\" (UID: \"a8d27ef6-3306-410d-96ee-0648446c759f\") " Dec 03 09:11:08 crc kubenswrapper[4576]: I1203 09:11:08.484471 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nmp4p\" (UniqueName: \"kubernetes.io/projected/54df14dc-89cd-4940-aed5-0ba757f5294c-kube-api-access-nmp4p\") pod \"54df14dc-89cd-4940-aed5-0ba757f5294c\" (UID: \"54df14dc-89cd-4940-aed5-0ba757f5294c\") " Dec 03 09:11:08 crc kubenswrapper[4576]: I1203 09:11:08.484504 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0fe42276-9012-4675-8196-28962d473856-operator-scripts\") pod \"0fe42276-9012-4675-8196-28962d473856\" (UID: \"0fe42276-9012-4675-8196-28962d473856\") " Dec 03 09:11:08 crc kubenswrapper[4576]: I1203 09:11:08.484551 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-554wv\" (UniqueName: \"kubernetes.io/projected/bc770216-f893-46e0-8e36-1c60a1094e82-kube-api-access-554wv\") pod \"bc770216-f893-46e0-8e36-1c60a1094e82\" (UID: \"bc770216-f893-46e0-8e36-1c60a1094e82\") " Dec 03 09:11:08 crc kubenswrapper[4576]: I1203 09:11:08.484582 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/bc770216-f893-46e0-8e36-1c60a1094e82-operator-scripts\") pod \"bc770216-f893-46e0-8e36-1c60a1094e82\" (UID: \"bc770216-f893-46e0-8e36-1c60a1094e82\") " Dec 03 09:11:08 crc kubenswrapper[4576]: I1203 09:11:08.485784 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/54df14dc-89cd-4940-aed5-0ba757f5294c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "54df14dc-89cd-4940-aed5-0ba757f5294c" (UID: "54df14dc-89cd-4940-aed5-0ba757f5294c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:11:08 crc kubenswrapper[4576]: I1203 09:11:08.486054 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc770216-f893-46e0-8e36-1c60a1094e82-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "bc770216-f893-46e0-8e36-1c60a1094e82" (UID: "bc770216-f893-46e0-8e36-1c60a1094e82"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:11:08 crc kubenswrapper[4576]: I1203 09:11:08.487030 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef9dad13-73f8-4391-b2e8-3a043cff74c0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ef9dad13-73f8-4391-b2e8-3a043cff74c0" (UID: "ef9dad13-73f8-4391-b2e8-3a043cff74c0"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:11:08 crc kubenswrapper[4576]: I1203 09:11:08.487193 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0fe42276-9012-4675-8196-28962d473856-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0fe42276-9012-4675-8196-28962d473856" (UID: "0fe42276-9012-4675-8196-28962d473856"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:11:08 crc kubenswrapper[4576]: I1203 09:11:08.487196 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a8d27ef6-3306-410d-96ee-0648446c759f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a8d27ef6-3306-410d-96ee-0648446c759f" (UID: "a8d27ef6-3306-410d-96ee-0648446c759f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:11:08 crc kubenswrapper[4576]: I1203 09:11:08.492653 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54df14dc-89cd-4940-aed5-0ba757f5294c-kube-api-access-nmp4p" (OuterVolumeSpecName: "kube-api-access-nmp4p") pod "54df14dc-89cd-4940-aed5-0ba757f5294c" (UID: "54df14dc-89cd-4940-aed5-0ba757f5294c"). InnerVolumeSpecName "kube-api-access-nmp4p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:11:08 crc kubenswrapper[4576]: I1203 09:11:08.494720 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc770216-f893-46e0-8e36-1c60a1094e82-kube-api-access-554wv" (OuterVolumeSpecName: "kube-api-access-554wv") pod "bc770216-f893-46e0-8e36-1c60a1094e82" (UID: "bc770216-f893-46e0-8e36-1c60a1094e82"). InnerVolumeSpecName "kube-api-access-554wv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:11:08 crc kubenswrapper[4576]: I1203 09:11:08.494938 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8d27ef6-3306-410d-96ee-0648446c759f-kube-api-access-fs8xd" (OuterVolumeSpecName: "kube-api-access-fs8xd") pod "a8d27ef6-3306-410d-96ee-0648446c759f" (UID: "a8d27ef6-3306-410d-96ee-0648446c759f"). InnerVolumeSpecName "kube-api-access-fs8xd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:11:08 crc kubenswrapper[4576]: I1203 09:11:08.495213 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef9dad13-73f8-4391-b2e8-3a043cff74c0-kube-api-access-89m29" (OuterVolumeSpecName: "kube-api-access-89m29") pod "ef9dad13-73f8-4391-b2e8-3a043cff74c0" (UID: "ef9dad13-73f8-4391-b2e8-3a043cff74c0"). InnerVolumeSpecName "kube-api-access-89m29". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:11:08 crc kubenswrapper[4576]: I1203 09:11:08.498988 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0fe42276-9012-4675-8196-28962d473856-kube-api-access-6f7z9" (OuterVolumeSpecName: "kube-api-access-6f7z9") pod "0fe42276-9012-4675-8196-28962d473856" (UID: "0fe42276-9012-4675-8196-28962d473856"). InnerVolumeSpecName "kube-api-access-6f7z9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:11:08 crc kubenswrapper[4576]: I1203 09:11:08.588389 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-89m29\" (UniqueName: \"kubernetes.io/projected/ef9dad13-73f8-4391-b2e8-3a043cff74c0-kube-api-access-89m29\") on node \"crc\" DevicePath \"\"" Dec 03 09:11:08 crc kubenswrapper[4576]: I1203 09:11:08.588428 4576 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a8d27ef6-3306-410d-96ee-0648446c759f-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:11:08 crc kubenswrapper[4576]: I1203 09:11:08.588441 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6f7z9\" (UniqueName: \"kubernetes.io/projected/0fe42276-9012-4675-8196-28962d473856-kube-api-access-6f7z9\") on node \"crc\" DevicePath \"\"" Dec 03 09:11:08 crc kubenswrapper[4576]: I1203 09:11:08.588452 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fs8xd\" (UniqueName: \"kubernetes.io/projected/a8d27ef6-3306-410d-96ee-0648446c759f-kube-api-access-fs8xd\") on node \"crc\" DevicePath \"\"" Dec 03 09:11:08 crc kubenswrapper[4576]: I1203 09:11:08.588464 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nmp4p\" (UniqueName: \"kubernetes.io/projected/54df14dc-89cd-4940-aed5-0ba757f5294c-kube-api-access-nmp4p\") on node \"crc\" DevicePath \"\"" Dec 03 09:11:08 crc kubenswrapper[4576]: I1203 09:11:08.588478 4576 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0fe42276-9012-4675-8196-28962d473856-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:11:08 crc kubenswrapper[4576]: I1203 09:11:08.588489 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-554wv\" (UniqueName: \"kubernetes.io/projected/bc770216-f893-46e0-8e36-1c60a1094e82-kube-api-access-554wv\") on node \"crc\" DevicePath \"\"" Dec 03 09:11:08 crc kubenswrapper[4576]: I1203 09:11:08.588500 4576 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/bc770216-f893-46e0-8e36-1c60a1094e82-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:11:08 crc kubenswrapper[4576]: I1203 09:11:08.588511 4576 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ef9dad13-73f8-4391-b2e8-3a043cff74c0-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:11:08 crc kubenswrapper[4576]: I1203 09:11:08.588666 4576 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/54df14dc-89cd-4940-aed5-0ba757f5294c-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:11:09 crc kubenswrapper[4576]: I1203 09:11:09.125943 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8e0694dc-a49e-4136-a206-3bb5c8acd48c","Type":"ContainerStarted","Data":"8e38d962c065c36511c69dd0f5968606e18da25d58dcc67a9ce0b97db526973d"} Dec 03 09:11:09 crc kubenswrapper[4576]: I1203 09:11:09.126314 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8e0694dc-a49e-4136-a206-3bb5c8acd48c","Type":"ContainerStarted","Data":"6ddff9a98a292a8aed0330d95f25b79b98e85a2c9465fcdec5cb57df2b79cfc2"} Dec 03 09:11:09 crc kubenswrapper[4576]: I1203 09:11:09.126337 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8e0694dc-a49e-4136-a206-3bb5c8acd48c","Type":"ContainerStarted","Data":"2d963f60b092dbff92c8f3d279fc5a55ed15d4424adcafa81ed966b32025760c"} Dec 03 09:11:09 crc kubenswrapper[4576]: I1203 09:11:09.128137 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-gmdx8" event={"ID":"0fe42276-9012-4675-8196-28962d473856","Type":"ContainerDied","Data":"7c9ee0b146906fa027e486d84a9f87a5ac2c1993494a8abbc03affc104f7e9d3"} Dec 03 09:11:09 crc kubenswrapper[4576]: I1203 09:11:09.128172 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7c9ee0b146906fa027e486d84a9f87a5ac2c1993494a8abbc03affc104f7e9d3" Dec 03 09:11:09 crc kubenswrapper[4576]: I1203 09:11:09.128814 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-gmdx8" Dec 03 09:11:09 crc kubenswrapper[4576]: I1203 09:11:09.129968 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-cf28-account-create-update-4g5wv" event={"ID":"ef9dad13-73f8-4391-b2e8-3a043cff74c0","Type":"ContainerDied","Data":"3d7b93507d397117f025f350f925aeb41532cac422a98f10e42ba953ad5cf471"} Dec 03 09:11:09 crc kubenswrapper[4576]: I1203 09:11:09.129999 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3d7b93507d397117f025f350f925aeb41532cac422a98f10e42ba953ad5cf471" Dec 03 09:11:09 crc kubenswrapper[4576]: I1203 09:11:09.129982 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-cf28-account-create-update-4g5wv" Dec 03 09:11:09 crc kubenswrapper[4576]: I1203 09:11:09.136722 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-8s9cm" event={"ID":"a8d27ef6-3306-410d-96ee-0648446c759f","Type":"ContainerDied","Data":"4497dc96b692cd990a07ae951ccdc33780be2d839c16cd30a031ca2b7f219df0"} Dec 03 09:11:09 crc kubenswrapper[4576]: I1203 09:11:09.136902 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4497dc96b692cd990a07ae951ccdc33780be2d839c16cd30a031ca2b7f219df0" Dec 03 09:11:09 crc kubenswrapper[4576]: I1203 09:11:09.136731 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-8s9cm" Dec 03 09:11:09 crc kubenswrapper[4576]: I1203 09:11:09.139145 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-917b-account-create-update-5fjkf" event={"ID":"54df14dc-89cd-4940-aed5-0ba757f5294c","Type":"ContainerDied","Data":"c1fcc174830805d2c16b57daafc558874309d2093f423951c5818cf32188677b"} Dec 03 09:11:09 crc kubenswrapper[4576]: I1203 09:11:09.139168 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c1fcc174830805d2c16b57daafc558874309d2093f423951c5818cf32188677b" Dec 03 09:11:09 crc kubenswrapper[4576]: I1203 09:11:09.139214 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-917b-account-create-update-5fjkf" Dec 03 09:11:09 crc kubenswrapper[4576]: I1203 09:11:09.141286 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-vwggs" Dec 03 09:11:09 crc kubenswrapper[4576]: I1203 09:11:09.143362 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-vwggs" event={"ID":"bc770216-f893-46e0-8e36-1c60a1094e82","Type":"ContainerDied","Data":"dcb30e7fa2b0010cf91db3a2e9f1119023fe58a82064663f2722e43ae8cfe0f1"} Dec 03 09:11:09 crc kubenswrapper[4576]: E1203 09:11:09.143700 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"keystone-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-keystone:current-podified\\\"\"" pod="openstack/keystone-db-sync-xpx9m" podUID="9361513a-8494-48cb-9d68-43a57f2d679f" Dec 03 09:11:09 crc kubenswrapper[4576]: I1203 09:11:09.143717 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dcb30e7fa2b0010cf91db3a2e9f1119023fe58a82064663f2722e43ae8cfe0f1" Dec 03 09:11:11 crc kubenswrapper[4576]: I1203 09:11:11.169283 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8e0694dc-a49e-4136-a206-3bb5c8acd48c","Type":"ContainerStarted","Data":"22f5608041a1958491abd31e4a718370e82d52b17455081002a75dc7c507b427"} Dec 03 09:11:11 crc kubenswrapper[4576]: I1203 09:11:11.169693 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8e0694dc-a49e-4136-a206-3bb5c8acd48c","Type":"ContainerStarted","Data":"ebc23cac49eb64bf994e9f62550a43cd47f8759ad1670ded25a8027c45d6dcf5"} Dec 03 09:11:11 crc kubenswrapper[4576]: I1203 09:11:11.679138 4576 scope.go:117] "RemoveContainer" containerID="971057ff9d8dd948ea2baec3fb0fe13004ef1bdcb132acd7f101f25e8ebc2c91" Dec 03 09:11:12 crc kubenswrapper[4576]: I1203 09:11:12.181839 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/swift-storage-0" event={"ID":"8e0694dc-a49e-4136-a206-3bb5c8acd48c","Type":"ContainerStarted","Data":"18fb0c581e7c92fc40513d03ecd2a7fac0c693527c7f08ed372b18813db479a4"} Dec 03 09:11:12 crc kubenswrapper[4576]: I1203 09:11:12.182238 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8e0694dc-a49e-4136-a206-3bb5c8acd48c","Type":"ContainerStarted","Data":"ca611536ecb6812d6d1d16b809ead7bcbe6e8dab0a8cc6419424ddb3683527ed"} Dec 03 09:11:13 crc kubenswrapper[4576]: I1203 09:11:13.194891 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" event={"ID":"60b1bede-26e9-4b5d-b450-9866da685693","Type":"ContainerStarted","Data":"3dfdd9ffa1395b330c78c278dbb37d60033302cad8d06ba1b081f68d7feaaefc"} Dec 03 09:11:14 crc kubenswrapper[4576]: I1203 09:11:14.214818 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8e0694dc-a49e-4136-a206-3bb5c8acd48c","Type":"ContainerStarted","Data":"8a6dac808d69f0d98ca3b58bbb1a6409bcf59c7e55c9936a8fb98d62decf5c49"} Dec 03 09:11:14 crc kubenswrapper[4576]: I1203 09:11:14.215308 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8e0694dc-a49e-4136-a206-3bb5c8acd48c","Type":"ContainerStarted","Data":"0c97ec340b829702bbd14647c2957f029b62cb16d3ef42db3ee288dee25ae5d7"} Dec 03 09:11:15 crc kubenswrapper[4576]: I1203 09:11:15.230384 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8e0694dc-a49e-4136-a206-3bb5c8acd48c","Type":"ContainerStarted","Data":"f16606be19f67732c1ae0b2e826123afd7cfb64f2d9ea3c55286d747e37b8d7a"} Dec 03 09:11:15 crc kubenswrapper[4576]: I1203 09:11:15.230973 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8e0694dc-a49e-4136-a206-3bb5c8acd48c","Type":"ContainerStarted","Data":"32d06d58c0c31a5e5d38b6abeab61f7327b543c8f08e6d2d29837a50a4d83f67"} Dec 03 09:11:15 crc kubenswrapper[4576]: I1203 09:11:15.230984 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8e0694dc-a49e-4136-a206-3bb5c8acd48c","Type":"ContainerStarted","Data":"82ab906d48b3908934b749ebfe83530aeba43a56e6f400ae2a69849f1ba54168"} Dec 03 09:11:15 crc kubenswrapper[4576]: I1203 09:11:15.230993 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8e0694dc-a49e-4136-a206-3bb5c8acd48c","Type":"ContainerStarted","Data":"1b6661b6a6b96371df49e9fc6be52a4322a3227709a7eb5f6e94be4cef85c766"} Dec 03 09:11:15 crc kubenswrapper[4576]: I1203 09:11:15.231003 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8e0694dc-a49e-4136-a206-3bb5c8acd48c","Type":"ContainerStarted","Data":"ecaa93773ddd057f5b7085082a0816c5a20756b573e1318a1702c805e51d52de"} Dec 03 09:11:15 crc kubenswrapper[4576]: I1203 09:11:15.278692 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=36.272017115 podStartE2EDuration="1m0.278670872s" podCreationTimestamp="2025-12-03 09:10:15 +0000 UTC" firstStartedPulling="2025-12-03 09:10:49.594373021 +0000 UTC m=+1856.980350005" lastFinishedPulling="2025-12-03 09:11:13.601026778 +0000 UTC m=+1880.987003762" observedRunningTime="2025-12-03 09:11:15.262985053 +0000 UTC m=+1882.648962047" watchObservedRunningTime="2025-12-03 09:11:15.278670872 +0000 UTC m=+1882.664647856" 
Dec 03 09:11:15 crc kubenswrapper[4576]: I1203 09:11:15.556139 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c79d794d7-7rqjc"] Dec 03 09:11:15 crc kubenswrapper[4576]: E1203 09:11:15.556543 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8d27ef6-3306-410d-96ee-0648446c759f" containerName="mariadb-database-create" Dec 03 09:11:15 crc kubenswrapper[4576]: I1203 09:11:15.556557 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8d27ef6-3306-410d-96ee-0648446c759f" containerName="mariadb-database-create" Dec 03 09:11:15 crc kubenswrapper[4576]: E1203 09:11:15.556573 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fe42276-9012-4675-8196-28962d473856" containerName="mariadb-database-create" Dec 03 09:11:15 crc kubenswrapper[4576]: I1203 09:11:15.556581 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fe42276-9012-4675-8196-28962d473856" containerName="mariadb-database-create" Dec 03 09:11:15 crc kubenswrapper[4576]: E1203 09:11:15.556592 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54df14dc-89cd-4940-aed5-0ba757f5294c" containerName="mariadb-account-create-update" Dec 03 09:11:15 crc kubenswrapper[4576]: I1203 09:11:15.556599 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="54df14dc-89cd-4940-aed5-0ba757f5294c" containerName="mariadb-account-create-update" Dec 03 09:11:15 crc kubenswrapper[4576]: E1203 09:11:15.556618 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce003078-de4d-4722-9e0d-3d2570c008cc" containerName="mariadb-account-create-update" Dec 03 09:11:15 crc kubenswrapper[4576]: I1203 09:11:15.556624 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce003078-de4d-4722-9e0d-3d2570c008cc" containerName="mariadb-account-create-update" Dec 03 09:11:15 crc kubenswrapper[4576]: E1203 09:11:15.556642 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef9dad13-73f8-4391-b2e8-3a043cff74c0" containerName="mariadb-account-create-update" Dec 03 09:11:15 crc kubenswrapper[4576]: I1203 09:11:15.556648 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef9dad13-73f8-4391-b2e8-3a043cff74c0" containerName="mariadb-account-create-update" Dec 03 09:11:15 crc kubenswrapper[4576]: E1203 09:11:15.556659 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc770216-f893-46e0-8e36-1c60a1094e82" containerName="mariadb-database-create" Dec 03 09:11:15 crc kubenswrapper[4576]: I1203 09:11:15.556665 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc770216-f893-46e0-8e36-1c60a1094e82" containerName="mariadb-database-create" Dec 03 09:11:15 crc kubenswrapper[4576]: I1203 09:11:15.556812 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce003078-de4d-4722-9e0d-3d2570c008cc" containerName="mariadb-account-create-update" Dec 03 09:11:15 crc kubenswrapper[4576]: I1203 09:11:15.556822 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="54df14dc-89cd-4940-aed5-0ba757f5294c" containerName="mariadb-account-create-update" Dec 03 09:11:15 crc kubenswrapper[4576]: I1203 09:11:15.556835 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc770216-f893-46e0-8e36-1c60a1094e82" containerName="mariadb-database-create" Dec 03 09:11:15 crc kubenswrapper[4576]: I1203 09:11:15.556846 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="0fe42276-9012-4675-8196-28962d473856" containerName="mariadb-database-create" Dec 03 09:11:15 crc 
kubenswrapper[4576]: I1203 09:11:15.556857 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8d27ef6-3306-410d-96ee-0648446c759f" containerName="mariadb-database-create" Dec 03 09:11:15 crc kubenswrapper[4576]: I1203 09:11:15.556872 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef9dad13-73f8-4391-b2e8-3a043cff74c0" containerName="mariadb-account-create-update" Dec 03 09:11:15 crc kubenswrapper[4576]: I1203 09:11:15.557776 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c79d794d7-7rqjc" Dec 03 09:11:15 crc kubenswrapper[4576]: I1203 09:11:15.570831 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c79d794d7-7rqjc"] Dec 03 09:11:15 crc kubenswrapper[4576]: I1203 09:11:15.573784 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Dec 03 09:11:15 crc kubenswrapper[4576]: I1203 09:11:15.725018 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zwrq\" (UniqueName: \"kubernetes.io/projected/0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a-kube-api-access-4zwrq\") pod \"dnsmasq-dns-5c79d794d7-7rqjc\" (UID: \"0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a\") " pod="openstack/dnsmasq-dns-5c79d794d7-7rqjc" Dec 03 09:11:15 crc kubenswrapper[4576]: I1203 09:11:15.725088 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a-config\") pod \"dnsmasq-dns-5c79d794d7-7rqjc\" (UID: \"0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a\") " pod="openstack/dnsmasq-dns-5c79d794d7-7rqjc" Dec 03 09:11:15 crc kubenswrapper[4576]: I1203 09:11:15.725223 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a-ovsdbserver-sb\") pod \"dnsmasq-dns-5c79d794d7-7rqjc\" (UID: \"0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a\") " pod="openstack/dnsmasq-dns-5c79d794d7-7rqjc" Dec 03 09:11:15 crc kubenswrapper[4576]: I1203 09:11:15.725296 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a-dns-swift-storage-0\") pod \"dnsmasq-dns-5c79d794d7-7rqjc\" (UID: \"0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a\") " pod="openstack/dnsmasq-dns-5c79d794d7-7rqjc" Dec 03 09:11:15 crc kubenswrapper[4576]: I1203 09:11:15.725417 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a-dns-svc\") pod \"dnsmasq-dns-5c79d794d7-7rqjc\" (UID: \"0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a\") " pod="openstack/dnsmasq-dns-5c79d794d7-7rqjc" Dec 03 09:11:15 crc kubenswrapper[4576]: I1203 09:11:15.725592 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a-ovsdbserver-nb\") pod \"dnsmasq-dns-5c79d794d7-7rqjc\" (UID: \"0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a\") " pod="openstack/dnsmasq-dns-5c79d794d7-7rqjc" Dec 03 09:11:15 crc kubenswrapper[4576]: I1203 09:11:15.827479 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zwrq\" (UniqueName: 
\"kubernetes.io/projected/0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a-kube-api-access-4zwrq\") pod \"dnsmasq-dns-5c79d794d7-7rqjc\" (UID: \"0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a\") " pod="openstack/dnsmasq-dns-5c79d794d7-7rqjc" Dec 03 09:11:15 crc kubenswrapper[4576]: I1203 09:11:15.828224 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a-config\") pod \"dnsmasq-dns-5c79d794d7-7rqjc\" (UID: \"0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a\") " pod="openstack/dnsmasq-dns-5c79d794d7-7rqjc" Dec 03 09:11:15 crc kubenswrapper[4576]: I1203 09:11:15.828554 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a-ovsdbserver-sb\") pod \"dnsmasq-dns-5c79d794d7-7rqjc\" (UID: \"0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a\") " pod="openstack/dnsmasq-dns-5c79d794d7-7rqjc" Dec 03 09:11:15 crc kubenswrapper[4576]: I1203 09:11:15.828581 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a-dns-swift-storage-0\") pod \"dnsmasq-dns-5c79d794d7-7rqjc\" (UID: \"0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a\") " pod="openstack/dnsmasq-dns-5c79d794d7-7rqjc" Dec 03 09:11:15 crc kubenswrapper[4576]: I1203 09:11:15.829250 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a-config\") pod \"dnsmasq-dns-5c79d794d7-7rqjc\" (UID: \"0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a\") " pod="openstack/dnsmasq-dns-5c79d794d7-7rqjc" Dec 03 09:11:15 crc kubenswrapper[4576]: I1203 09:11:15.829378 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a-ovsdbserver-sb\") pod \"dnsmasq-dns-5c79d794d7-7rqjc\" (UID: \"0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a\") " pod="openstack/dnsmasq-dns-5c79d794d7-7rqjc" Dec 03 09:11:15 crc kubenswrapper[4576]: I1203 09:11:15.829418 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a-dns-swift-storage-0\") pod \"dnsmasq-dns-5c79d794d7-7rqjc\" (UID: \"0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a\") " pod="openstack/dnsmasq-dns-5c79d794d7-7rqjc" Dec 03 09:11:15 crc kubenswrapper[4576]: I1203 09:11:15.830274 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a-dns-svc\") pod \"dnsmasq-dns-5c79d794d7-7rqjc\" (UID: \"0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a\") " pod="openstack/dnsmasq-dns-5c79d794d7-7rqjc" Dec 03 09:11:15 crc kubenswrapper[4576]: I1203 09:11:15.830917 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a-dns-svc\") pod \"dnsmasq-dns-5c79d794d7-7rqjc\" (UID: \"0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a\") " pod="openstack/dnsmasq-dns-5c79d794d7-7rqjc" Dec 03 09:11:15 crc kubenswrapper[4576]: I1203 09:11:15.831216 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a-ovsdbserver-nb\") pod \"dnsmasq-dns-5c79d794d7-7rqjc\" 
(UID: \"0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a\") " pod="openstack/dnsmasq-dns-5c79d794d7-7rqjc" Dec 03 09:11:15 crc kubenswrapper[4576]: I1203 09:11:15.831933 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a-ovsdbserver-nb\") pod \"dnsmasq-dns-5c79d794d7-7rqjc\" (UID: \"0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a\") " pod="openstack/dnsmasq-dns-5c79d794d7-7rqjc" Dec 03 09:11:15 crc kubenswrapper[4576]: I1203 09:11:15.847306 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zwrq\" (UniqueName: \"kubernetes.io/projected/0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a-kube-api-access-4zwrq\") pod \"dnsmasq-dns-5c79d794d7-7rqjc\" (UID: \"0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a\") " pod="openstack/dnsmasq-dns-5c79d794d7-7rqjc" Dec 03 09:11:15 crc kubenswrapper[4576]: I1203 09:11:15.877484 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c79d794d7-7rqjc" Dec 03 09:11:16 crc kubenswrapper[4576]: I1203 09:11:16.313312 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c79d794d7-7rqjc"] Dec 03 09:11:17 crc kubenswrapper[4576]: I1203 09:11:17.250506 4576 generic.go:334] "Generic (PLEG): container finished" podID="0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a" containerID="b0b2afca975767a6ed374848a7c36f2a562362953db74c5b6766f2ffcec73b4b" exitCode=0 Dec 03 09:11:17 crc kubenswrapper[4576]: I1203 09:11:17.250576 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c79d794d7-7rqjc" event={"ID":"0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a","Type":"ContainerDied","Data":"b0b2afca975767a6ed374848a7c36f2a562362953db74c5b6766f2ffcec73b4b"} Dec 03 09:11:17 crc kubenswrapper[4576]: I1203 09:11:17.250896 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c79d794d7-7rqjc" event={"ID":"0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a","Type":"ContainerStarted","Data":"5b0ff2bb78f0276a3c21b86327d38255149c4bb42fe7371cb24d771dd97f9172"} Dec 03 09:11:18 crc kubenswrapper[4576]: I1203 09:11:18.261386 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c79d794d7-7rqjc" event={"ID":"0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a","Type":"ContainerStarted","Data":"39d00c03f46805cb84da9b3ae3c2f088c9132fc8021fab9985261cb56df0dab8"} Dec 03 09:11:18 crc kubenswrapper[4576]: I1203 09:11:18.262040 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c79d794d7-7rqjc" Dec 03 09:11:18 crc kubenswrapper[4576]: I1203 09:11:18.291522 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c79d794d7-7rqjc" podStartSLOduration=3.291494479 podStartE2EDuration="3.291494479s" podCreationTimestamp="2025-12-03 09:11:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:11:18.285685839 +0000 UTC m=+1885.671662863" watchObservedRunningTime="2025-12-03 09:11:18.291494479 +0000 UTC m=+1885.677471483" Dec 03 09:11:23 crc kubenswrapper[4576]: I1203 09:11:23.324118 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-9dp49" event={"ID":"9fd5058c-a47f-46cd-b7a7-d6d02014da6e","Type":"ContainerStarted","Data":"d884e6620ba50c7b8e22a8c30fd33c77f64687e765c97885ee49fb58d9ec5a2f"} Dec 03 09:11:24 crc kubenswrapper[4576]: I1203 09:11:24.339122 4576 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-xpx9m" event={"ID":"9361513a-8494-48cb-9d68-43a57f2d679f","Type":"ContainerStarted","Data":"21b7c84bfa993c1020261ed4d65627e18ca78a8bacf0338a593c23d5e573528f"} Dec 03 09:11:24 crc kubenswrapper[4576]: I1203 09:11:24.378757 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-9dp49" podStartSLOduration=3.043154618 podStartE2EDuration="40.37873392s" podCreationTimestamp="2025-12-03 09:10:44 +0000 UTC" firstStartedPulling="2025-12-03 09:10:44.893445973 +0000 UTC m=+1852.279422957" lastFinishedPulling="2025-12-03 09:11:22.229025275 +0000 UTC m=+1889.615002259" observedRunningTime="2025-12-03 09:11:23.350578057 +0000 UTC m=+1890.736555111" watchObservedRunningTime="2025-12-03 09:11:24.37873392 +0000 UTC m=+1891.764710904" Dec 03 09:11:24 crc kubenswrapper[4576]: I1203 09:11:24.379020 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-xpx9m" podStartSLOduration=3.18512217 podStartE2EDuration="33.379015227s" podCreationTimestamp="2025-12-03 09:10:51 +0000 UTC" firstStartedPulling="2025-12-03 09:10:53.187999307 +0000 UTC m=+1860.573976291" lastFinishedPulling="2025-12-03 09:11:23.381892364 +0000 UTC m=+1890.767869348" observedRunningTime="2025-12-03 09:11:24.370681589 +0000 UTC m=+1891.756658593" watchObservedRunningTime="2025-12-03 09:11:24.379015227 +0000 UTC m=+1891.764992221" Dec 03 09:11:25 crc kubenswrapper[4576]: I1203 09:11:25.878818 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5c79d794d7-7rqjc" Dec 03 09:11:25 crc kubenswrapper[4576]: I1203 09:11:25.940013 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-dfdr5"] Dec 03 09:11:25 crc kubenswrapper[4576]: I1203 09:11:25.940326 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-b8fbc5445-dfdr5" podUID="9d0cf9db-537a-41b4-bb17-d5f1a42f84e7" containerName="dnsmasq-dns" containerID="cri-o://bf907d636153951783862262bfd32ff9c662ad26d21b8b1e9058b8569130f578" gracePeriod=10 Dec 03 09:11:26 crc kubenswrapper[4576]: I1203 09:11:26.366063 4576 generic.go:334] "Generic (PLEG): container finished" podID="9d0cf9db-537a-41b4-bb17-d5f1a42f84e7" containerID="bf907d636153951783862262bfd32ff9c662ad26d21b8b1e9058b8569130f578" exitCode=0 Dec 03 09:11:26 crc kubenswrapper[4576]: I1203 09:11:26.366123 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-dfdr5" event={"ID":"9d0cf9db-537a-41b4-bb17-d5f1a42f84e7","Type":"ContainerDied","Data":"bf907d636153951783862262bfd32ff9c662ad26d21b8b1e9058b8569130f578"} Dec 03 09:11:26 crc kubenswrapper[4576]: I1203 09:11:26.495937 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-dfdr5" Dec 03 09:11:26 crc kubenswrapper[4576]: I1203 09:11:26.622399 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9d0cf9db-537a-41b4-bb17-d5f1a42f84e7-ovsdbserver-sb\") pod \"9d0cf9db-537a-41b4-bb17-d5f1a42f84e7\" (UID: \"9d0cf9db-537a-41b4-bb17-d5f1a42f84e7\") " Dec 03 09:11:26 crc kubenswrapper[4576]: I1203 09:11:26.622827 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9d0cf9db-537a-41b4-bb17-d5f1a42f84e7-dns-svc\") pod \"9d0cf9db-537a-41b4-bb17-d5f1a42f84e7\" (UID: \"9d0cf9db-537a-41b4-bb17-d5f1a42f84e7\") " Dec 03 09:11:26 crc kubenswrapper[4576]: I1203 09:11:26.622853 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d0cf9db-537a-41b4-bb17-d5f1a42f84e7-config\") pod \"9d0cf9db-537a-41b4-bb17-d5f1a42f84e7\" (UID: \"9d0cf9db-537a-41b4-bb17-d5f1a42f84e7\") " Dec 03 09:11:26 crc kubenswrapper[4576]: I1203 09:11:26.622937 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-67xjq\" (UniqueName: \"kubernetes.io/projected/9d0cf9db-537a-41b4-bb17-d5f1a42f84e7-kube-api-access-67xjq\") pod \"9d0cf9db-537a-41b4-bb17-d5f1a42f84e7\" (UID: \"9d0cf9db-537a-41b4-bb17-d5f1a42f84e7\") " Dec 03 09:11:26 crc kubenswrapper[4576]: I1203 09:11:26.622988 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9d0cf9db-537a-41b4-bb17-d5f1a42f84e7-ovsdbserver-nb\") pod \"9d0cf9db-537a-41b4-bb17-d5f1a42f84e7\" (UID: \"9d0cf9db-537a-41b4-bb17-d5f1a42f84e7\") " Dec 03 09:11:26 crc kubenswrapper[4576]: I1203 09:11:26.642248 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d0cf9db-537a-41b4-bb17-d5f1a42f84e7-kube-api-access-67xjq" (OuterVolumeSpecName: "kube-api-access-67xjq") pod "9d0cf9db-537a-41b4-bb17-d5f1a42f84e7" (UID: "9d0cf9db-537a-41b4-bb17-d5f1a42f84e7"). InnerVolumeSpecName "kube-api-access-67xjq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:11:26 crc kubenswrapper[4576]: I1203 09:11:26.694695 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d0cf9db-537a-41b4-bb17-d5f1a42f84e7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9d0cf9db-537a-41b4-bb17-d5f1a42f84e7" (UID: "9d0cf9db-537a-41b4-bb17-d5f1a42f84e7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:11:26 crc kubenswrapper[4576]: I1203 09:11:26.703249 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d0cf9db-537a-41b4-bb17-d5f1a42f84e7-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9d0cf9db-537a-41b4-bb17-d5f1a42f84e7" (UID: "9d0cf9db-537a-41b4-bb17-d5f1a42f84e7"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:11:26 crc kubenswrapper[4576]: I1203 09:11:26.709755 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d0cf9db-537a-41b4-bb17-d5f1a42f84e7-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9d0cf9db-537a-41b4-bb17-d5f1a42f84e7" (UID: "9d0cf9db-537a-41b4-bb17-d5f1a42f84e7"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:11:26 crc kubenswrapper[4576]: I1203 09:11:26.725359 4576 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9d0cf9db-537a-41b4-bb17-d5f1a42f84e7-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 09:11:26 crc kubenswrapper[4576]: I1203 09:11:26.725669 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-67xjq\" (UniqueName: \"kubernetes.io/projected/9d0cf9db-537a-41b4-bb17-d5f1a42f84e7-kube-api-access-67xjq\") on node \"crc\" DevicePath \"\"" Dec 03 09:11:26 crc kubenswrapper[4576]: I1203 09:11:26.725829 4576 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9d0cf9db-537a-41b4-bb17-d5f1a42f84e7-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 09:11:26 crc kubenswrapper[4576]: I1203 09:11:26.725887 4576 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9d0cf9db-537a-41b4-bb17-d5f1a42f84e7-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 09:11:26 crc kubenswrapper[4576]: I1203 09:11:26.733290 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d0cf9db-537a-41b4-bb17-d5f1a42f84e7-config" (OuterVolumeSpecName: "config") pod "9d0cf9db-537a-41b4-bb17-d5f1a42f84e7" (UID: "9d0cf9db-537a-41b4-bb17-d5f1a42f84e7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:11:26 crc kubenswrapper[4576]: I1203 09:11:26.827102 4576 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d0cf9db-537a-41b4-bb17-d5f1a42f84e7-config\") on node \"crc\" DevicePath \"\"" Dec 03 09:11:27 crc kubenswrapper[4576]: I1203 09:11:27.377223 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-dfdr5" event={"ID":"9d0cf9db-537a-41b4-bb17-d5f1a42f84e7","Type":"ContainerDied","Data":"5b7fa9c2d18128ff368e9c781a90fe3aa6b1015491f139bb7225737a04c27740"} Dec 03 09:11:27 crc kubenswrapper[4576]: I1203 09:11:27.377302 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-dfdr5" Dec 03 09:11:27 crc kubenswrapper[4576]: I1203 09:11:27.377313 4576 scope.go:117] "RemoveContainer" containerID="bf907d636153951783862262bfd32ff9c662ad26d21b8b1e9058b8569130f578" Dec 03 09:11:27 crc kubenswrapper[4576]: I1203 09:11:27.398857 4576 scope.go:117] "RemoveContainer" containerID="4a74b884b9e59fc885fc851ef6fe5473aaeaad653e43f2f026e624f29daeab06" Dec 03 09:11:27 crc kubenswrapper[4576]: I1203 09:11:27.426557 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-dfdr5"] Dec 03 09:11:27 crc kubenswrapper[4576]: I1203 09:11:27.434232 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-dfdr5"] Dec 03 09:11:27 crc kubenswrapper[4576]: I1203 09:11:27.686962 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d0cf9db-537a-41b4-bb17-d5f1a42f84e7" path="/var/lib/kubelet/pods/9d0cf9db-537a-41b4-bb17-d5f1a42f84e7/volumes" Dec 03 09:11:28 crc kubenswrapper[4576]: I1203 09:11:28.389553 4576 generic.go:334] "Generic (PLEG): container finished" podID="9361513a-8494-48cb-9d68-43a57f2d679f" containerID="21b7c84bfa993c1020261ed4d65627e18ca78a8bacf0338a593c23d5e573528f" exitCode=0 Dec 03 09:11:28 crc kubenswrapper[4576]: I1203 09:11:28.389644 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-xpx9m" event={"ID":"9361513a-8494-48cb-9d68-43a57f2d679f","Type":"ContainerDied","Data":"21b7c84bfa993c1020261ed4d65627e18ca78a8bacf0338a593c23d5e573528f"} Dec 03 09:11:29 crc kubenswrapper[4576]: I1203 09:11:29.762378 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-xpx9m" Dec 03 09:11:29 crc kubenswrapper[4576]: I1203 09:11:29.876111 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9361513a-8494-48cb-9d68-43a57f2d679f-combined-ca-bundle\") pod \"9361513a-8494-48cb-9d68-43a57f2d679f\" (UID: \"9361513a-8494-48cb-9d68-43a57f2d679f\") " Dec 03 09:11:29 crc kubenswrapper[4576]: I1203 09:11:29.876374 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ldj7s\" (UniqueName: \"kubernetes.io/projected/9361513a-8494-48cb-9d68-43a57f2d679f-kube-api-access-ldj7s\") pod \"9361513a-8494-48cb-9d68-43a57f2d679f\" (UID: \"9361513a-8494-48cb-9d68-43a57f2d679f\") " Dec 03 09:11:29 crc kubenswrapper[4576]: I1203 09:11:29.876496 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9361513a-8494-48cb-9d68-43a57f2d679f-config-data\") pod \"9361513a-8494-48cb-9d68-43a57f2d679f\" (UID: \"9361513a-8494-48cb-9d68-43a57f2d679f\") " Dec 03 09:11:29 crc kubenswrapper[4576]: I1203 09:11:29.889747 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9361513a-8494-48cb-9d68-43a57f2d679f-kube-api-access-ldj7s" (OuterVolumeSpecName: "kube-api-access-ldj7s") pod "9361513a-8494-48cb-9d68-43a57f2d679f" (UID: "9361513a-8494-48cb-9d68-43a57f2d679f"). InnerVolumeSpecName "kube-api-access-ldj7s". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:11:29 crc kubenswrapper[4576]: I1203 09:11:29.931059 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9361513a-8494-48cb-9d68-43a57f2d679f-config-data" (OuterVolumeSpecName: "config-data") pod "9361513a-8494-48cb-9d68-43a57f2d679f" (UID: "9361513a-8494-48cb-9d68-43a57f2d679f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:11:29 crc kubenswrapper[4576]: I1203 09:11:29.936188 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9361513a-8494-48cb-9d68-43a57f2d679f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9361513a-8494-48cb-9d68-43a57f2d679f" (UID: "9361513a-8494-48cb-9d68-43a57f2d679f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:11:29 crc kubenswrapper[4576]: I1203 09:11:29.980519 4576 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9361513a-8494-48cb-9d68-43a57f2d679f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:11:29 crc kubenswrapper[4576]: I1203 09:11:29.980621 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ldj7s\" (UniqueName: \"kubernetes.io/projected/9361513a-8494-48cb-9d68-43a57f2d679f-kube-api-access-ldj7s\") on node \"crc\" DevicePath \"\"" Dec 03 09:11:29 crc kubenswrapper[4576]: I1203 09:11:29.980637 4576 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9361513a-8494-48cb-9d68-43a57f2d679f-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 09:11:30 crc kubenswrapper[4576]: I1203 09:11:30.413968 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-xpx9m" event={"ID":"9361513a-8494-48cb-9d68-43a57f2d679f","Type":"ContainerDied","Data":"9f399ec9e6360a0ddcd7a15653e261c6537bcd87a7affa843613bfa3cefcf3c0"} Dec 03 09:11:30 crc kubenswrapper[4576]: I1203 09:11:30.414367 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9f399ec9e6360a0ddcd7a15653e261c6537bcd87a7affa843613bfa3cefcf3c0" Dec 03 09:11:30 crc kubenswrapper[4576]: I1203 09:11:30.414051 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-xpx9m" Dec 03 09:11:30 crc kubenswrapper[4576]: I1203 09:11:30.676574 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-gffzv"] Dec 03 09:11:30 crc kubenswrapper[4576]: E1203 09:11:30.677029 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d0cf9db-537a-41b4-bb17-d5f1a42f84e7" containerName="dnsmasq-dns" Dec 03 09:11:30 crc kubenswrapper[4576]: I1203 09:11:30.677054 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d0cf9db-537a-41b4-bb17-d5f1a42f84e7" containerName="dnsmasq-dns" Dec 03 09:11:30 crc kubenswrapper[4576]: E1203 09:11:30.677082 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9361513a-8494-48cb-9d68-43a57f2d679f" containerName="keystone-db-sync" Dec 03 09:11:30 crc kubenswrapper[4576]: I1203 09:11:30.677093 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="9361513a-8494-48cb-9d68-43a57f2d679f" containerName="keystone-db-sync" Dec 03 09:11:30 crc kubenswrapper[4576]: E1203 09:11:30.677121 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d0cf9db-537a-41b4-bb17-d5f1a42f84e7" containerName="init" Dec 03 09:11:30 crc kubenswrapper[4576]: I1203 09:11:30.677133 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d0cf9db-537a-41b4-bb17-d5f1a42f84e7" containerName="init" Dec 03 09:11:30 crc kubenswrapper[4576]: I1203 09:11:30.677359 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="9361513a-8494-48cb-9d68-43a57f2d679f" containerName="keystone-db-sync" Dec 03 09:11:30 crc kubenswrapper[4576]: I1203 09:11:30.677397 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d0cf9db-537a-41b4-bb17-d5f1a42f84e7" containerName="dnsmasq-dns" Dec 03 09:11:30 crc kubenswrapper[4576]: I1203 09:11:30.679385 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-gffzv" Dec 03 09:11:30 crc kubenswrapper[4576]: I1203 09:11:30.684904 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 03 09:11:30 crc kubenswrapper[4576]: I1203 09:11:30.685128 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 03 09:11:30 crc kubenswrapper[4576]: I1203 09:11:30.685236 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-8h9rk" Dec 03 09:11:30 crc kubenswrapper[4576]: I1203 09:11:30.685364 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 03 09:11:30 crc kubenswrapper[4576]: I1203 09:11:30.685469 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 03 09:11:30 crc kubenswrapper[4576]: I1203 09:11:30.783581 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5b868669f-2w6g8"] Dec 03 09:11:30 crc kubenswrapper[4576]: I1203 09:11:30.785283 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b868669f-2w6g8" Dec 03 09:11:30 crc kubenswrapper[4576]: I1203 09:11:30.795985 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/361add1e-7a29-4f4a-9784-1ddfb1cb0de5-credential-keys\") pod \"keystone-bootstrap-gffzv\" (UID: \"361add1e-7a29-4f4a-9784-1ddfb1cb0de5\") " pod="openstack/keystone-bootstrap-gffzv" Dec 03 09:11:30 crc kubenswrapper[4576]: I1203 09:11:30.796086 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/361add1e-7a29-4f4a-9784-1ddfb1cb0de5-scripts\") pod \"keystone-bootstrap-gffzv\" (UID: \"361add1e-7a29-4f4a-9784-1ddfb1cb0de5\") " pod="openstack/keystone-bootstrap-gffzv" Dec 03 09:11:30 crc kubenswrapper[4576]: I1203 09:11:30.796117 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/361add1e-7a29-4f4a-9784-1ddfb1cb0de5-fernet-keys\") pod \"keystone-bootstrap-gffzv\" (UID: \"361add1e-7a29-4f4a-9784-1ddfb1cb0de5\") " pod="openstack/keystone-bootstrap-gffzv" Dec 03 09:11:30 crc kubenswrapper[4576]: I1203 09:11:30.796139 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lw5kl\" (UniqueName: \"kubernetes.io/projected/361add1e-7a29-4f4a-9784-1ddfb1cb0de5-kube-api-access-lw5kl\") pod \"keystone-bootstrap-gffzv\" (UID: \"361add1e-7a29-4f4a-9784-1ddfb1cb0de5\") " pod="openstack/keystone-bootstrap-gffzv" Dec 03 09:11:30 crc kubenswrapper[4576]: I1203 09:11:30.796156 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/361add1e-7a29-4f4a-9784-1ddfb1cb0de5-config-data\") pod \"keystone-bootstrap-gffzv\" (UID: \"361add1e-7a29-4f4a-9784-1ddfb1cb0de5\") " pod="openstack/keystone-bootstrap-gffzv" Dec 03 09:11:30 crc kubenswrapper[4576]: I1203 09:11:30.796962 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/361add1e-7a29-4f4a-9784-1ddfb1cb0de5-combined-ca-bundle\") pod \"keystone-bootstrap-gffzv\" (UID: \"361add1e-7a29-4f4a-9784-1ddfb1cb0de5\") " pod="openstack/keystone-bootstrap-gffzv" Dec 03 09:11:30 crc kubenswrapper[4576]: I1203 09:11:30.817808 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-gffzv"] Dec 03 09:11:30 crc kubenswrapper[4576]: I1203 09:11:30.828614 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b868669f-2w6g8"] Dec 03 09:11:30 crc kubenswrapper[4576]: I1203 09:11:30.899287 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/361add1e-7a29-4f4a-9784-1ddfb1cb0de5-scripts\") pod \"keystone-bootstrap-gffzv\" (UID: \"361add1e-7a29-4f4a-9784-1ddfb1cb0de5\") " pod="openstack/keystone-bootstrap-gffzv" Dec 03 09:11:30 crc kubenswrapper[4576]: I1203 09:11:30.899940 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5e307ec2-8d13-4c97-8a45-26f4f2fa3962-ovsdbserver-sb\") pod \"dnsmasq-dns-5b868669f-2w6g8\" (UID: \"5e307ec2-8d13-4c97-8a45-26f4f2fa3962\") " pod="openstack/dnsmasq-dns-5b868669f-2w6g8" Dec 03 
09:11:30 crc kubenswrapper[4576]: I1203 09:11:30.899993 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/361add1e-7a29-4f4a-9784-1ddfb1cb0de5-fernet-keys\") pod \"keystone-bootstrap-gffzv\" (UID: \"361add1e-7a29-4f4a-9784-1ddfb1cb0de5\") " pod="openstack/keystone-bootstrap-gffzv" Dec 03 09:11:30 crc kubenswrapper[4576]: I1203 09:11:30.900024 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lw5kl\" (UniqueName: \"kubernetes.io/projected/361add1e-7a29-4f4a-9784-1ddfb1cb0de5-kube-api-access-lw5kl\") pod \"keystone-bootstrap-gffzv\" (UID: \"361add1e-7a29-4f4a-9784-1ddfb1cb0de5\") " pod="openstack/keystone-bootstrap-gffzv" Dec 03 09:11:30 crc kubenswrapper[4576]: I1203 09:11:30.900045 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e307ec2-8d13-4c97-8a45-26f4f2fa3962-config\") pod \"dnsmasq-dns-5b868669f-2w6g8\" (UID: \"5e307ec2-8d13-4c97-8a45-26f4f2fa3962\") " pod="openstack/dnsmasq-dns-5b868669f-2w6g8" Dec 03 09:11:30 crc kubenswrapper[4576]: I1203 09:11:30.900061 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/361add1e-7a29-4f4a-9784-1ddfb1cb0de5-config-data\") pod \"keystone-bootstrap-gffzv\" (UID: \"361add1e-7a29-4f4a-9784-1ddfb1cb0de5\") " pod="openstack/keystone-bootstrap-gffzv" Dec 03 09:11:30 crc kubenswrapper[4576]: I1203 09:11:30.900295 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/361add1e-7a29-4f4a-9784-1ddfb1cb0de5-combined-ca-bundle\") pod \"keystone-bootstrap-gffzv\" (UID: \"361add1e-7a29-4f4a-9784-1ddfb1cb0de5\") " pod="openstack/keystone-bootstrap-gffzv" Dec 03 09:11:30 crc kubenswrapper[4576]: I1203 09:11:30.900342 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5e307ec2-8d13-4c97-8a45-26f4f2fa3962-ovsdbserver-nb\") pod \"dnsmasq-dns-5b868669f-2w6g8\" (UID: \"5e307ec2-8d13-4c97-8a45-26f4f2fa3962\") " pod="openstack/dnsmasq-dns-5b868669f-2w6g8" Dec 03 09:11:30 crc kubenswrapper[4576]: I1203 09:11:30.900377 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5e307ec2-8d13-4c97-8a45-26f4f2fa3962-dns-svc\") pod \"dnsmasq-dns-5b868669f-2w6g8\" (UID: \"5e307ec2-8d13-4c97-8a45-26f4f2fa3962\") " pod="openstack/dnsmasq-dns-5b868669f-2w6g8" Dec 03 09:11:30 crc kubenswrapper[4576]: I1203 09:11:30.900397 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/361add1e-7a29-4f4a-9784-1ddfb1cb0de5-credential-keys\") pod \"keystone-bootstrap-gffzv\" (UID: \"361add1e-7a29-4f4a-9784-1ddfb1cb0de5\") " pod="openstack/keystone-bootstrap-gffzv" Dec 03 09:11:30 crc kubenswrapper[4576]: I1203 09:11:30.900435 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5e307ec2-8d13-4c97-8a45-26f4f2fa3962-dns-swift-storage-0\") pod \"dnsmasq-dns-5b868669f-2w6g8\" (UID: \"5e307ec2-8d13-4c97-8a45-26f4f2fa3962\") " pod="openstack/dnsmasq-dns-5b868669f-2w6g8" Dec 03 09:11:30 crc kubenswrapper[4576]: I1203 
09:11:30.900468 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kcnrm\" (UniqueName: \"kubernetes.io/projected/5e307ec2-8d13-4c97-8a45-26f4f2fa3962-kube-api-access-kcnrm\") pod \"dnsmasq-dns-5b868669f-2w6g8\" (UID: \"5e307ec2-8d13-4c97-8a45-26f4f2fa3962\") " pod="openstack/dnsmasq-dns-5b868669f-2w6g8" Dec 03 09:11:30 crc kubenswrapper[4576]: I1203 09:11:30.914056 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/361add1e-7a29-4f4a-9784-1ddfb1cb0de5-scripts\") pod \"keystone-bootstrap-gffzv\" (UID: \"361add1e-7a29-4f4a-9784-1ddfb1cb0de5\") " pod="openstack/keystone-bootstrap-gffzv" Dec 03 09:11:30 crc kubenswrapper[4576]: I1203 09:11:30.914631 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/361add1e-7a29-4f4a-9784-1ddfb1cb0de5-fernet-keys\") pod \"keystone-bootstrap-gffzv\" (UID: \"361add1e-7a29-4f4a-9784-1ddfb1cb0de5\") " pod="openstack/keystone-bootstrap-gffzv" Dec 03 09:11:30 crc kubenswrapper[4576]: I1203 09:11:30.919275 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/361add1e-7a29-4f4a-9784-1ddfb1cb0de5-credential-keys\") pod \"keystone-bootstrap-gffzv\" (UID: \"361add1e-7a29-4f4a-9784-1ddfb1cb0de5\") " pod="openstack/keystone-bootstrap-gffzv" Dec 03 09:11:30 crc kubenswrapper[4576]: I1203 09:11:30.923828 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/361add1e-7a29-4f4a-9784-1ddfb1cb0de5-config-data\") pod \"keystone-bootstrap-gffzv\" (UID: \"361add1e-7a29-4f4a-9784-1ddfb1cb0de5\") " pod="openstack/keystone-bootstrap-gffzv" Dec 03 09:11:30 crc kubenswrapper[4576]: I1203 09:11:30.943225 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/361add1e-7a29-4f4a-9784-1ddfb1cb0de5-combined-ca-bundle\") pod \"keystone-bootstrap-gffzv\" (UID: \"361add1e-7a29-4f4a-9784-1ddfb1cb0de5\") " pod="openstack/keystone-bootstrap-gffzv" Dec 03 09:11:30 crc kubenswrapper[4576]: I1203 09:11:30.953327 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lw5kl\" (UniqueName: \"kubernetes.io/projected/361add1e-7a29-4f4a-9784-1ddfb1cb0de5-kube-api-access-lw5kl\") pod \"keystone-bootstrap-gffzv\" (UID: \"361add1e-7a29-4f4a-9784-1ddfb1cb0de5\") " pod="openstack/keystone-bootstrap-gffzv" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.010954 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-gffzv" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.011798 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e307ec2-8d13-4c97-8a45-26f4f2fa3962-config\") pod \"dnsmasq-dns-5b868669f-2w6g8\" (UID: \"5e307ec2-8d13-4c97-8a45-26f4f2fa3962\") " pod="openstack/dnsmasq-dns-5b868669f-2w6g8" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.011981 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5e307ec2-8d13-4c97-8a45-26f4f2fa3962-ovsdbserver-nb\") pod \"dnsmasq-dns-5b868669f-2w6g8\" (UID: \"5e307ec2-8d13-4c97-8a45-26f4f2fa3962\") " pod="openstack/dnsmasq-dns-5b868669f-2w6g8" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.012081 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5e307ec2-8d13-4c97-8a45-26f4f2fa3962-dns-svc\") pod \"dnsmasq-dns-5b868669f-2w6g8\" (UID: \"5e307ec2-8d13-4c97-8a45-26f4f2fa3962\") " pod="openstack/dnsmasq-dns-5b868669f-2w6g8" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.012201 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5e307ec2-8d13-4c97-8a45-26f4f2fa3962-dns-swift-storage-0\") pod \"dnsmasq-dns-5b868669f-2w6g8\" (UID: \"5e307ec2-8d13-4c97-8a45-26f4f2fa3962\") " pod="openstack/dnsmasq-dns-5b868669f-2w6g8" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.012339 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kcnrm\" (UniqueName: \"kubernetes.io/projected/5e307ec2-8d13-4c97-8a45-26f4f2fa3962-kube-api-access-kcnrm\") pod \"dnsmasq-dns-5b868669f-2w6g8\" (UID: \"5e307ec2-8d13-4c97-8a45-26f4f2fa3962\") " pod="openstack/dnsmasq-dns-5b868669f-2w6g8" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.012442 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5e307ec2-8d13-4c97-8a45-26f4f2fa3962-ovsdbserver-sb\") pod \"dnsmasq-dns-5b868669f-2w6g8\" (UID: \"5e307ec2-8d13-4c97-8a45-26f4f2fa3962\") " pod="openstack/dnsmasq-dns-5b868669f-2w6g8" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.013664 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5e307ec2-8d13-4c97-8a45-26f4f2fa3962-ovsdbserver-nb\") pod \"dnsmasq-dns-5b868669f-2w6g8\" (UID: \"5e307ec2-8d13-4c97-8a45-26f4f2fa3962\") " pod="openstack/dnsmasq-dns-5b868669f-2w6g8" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.013166 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5e307ec2-8d13-4c97-8a45-26f4f2fa3962-dns-svc\") pod \"dnsmasq-dns-5b868669f-2w6g8\" (UID: \"5e307ec2-8d13-4c97-8a45-26f4f2fa3962\") " pod="openstack/dnsmasq-dns-5b868669f-2w6g8" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.012623 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e307ec2-8d13-4c97-8a45-26f4f2fa3962-config\") pod \"dnsmasq-dns-5b868669f-2w6g8\" (UID: \"5e307ec2-8d13-4c97-8a45-26f4f2fa3962\") " pod="openstack/dnsmasq-dns-5b868669f-2w6g8" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.014382 
4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5e307ec2-8d13-4c97-8a45-26f4f2fa3962-dns-swift-storage-0\") pod \"dnsmasq-dns-5b868669f-2w6g8\" (UID: \"5e307ec2-8d13-4c97-8a45-26f4f2fa3962\") " pod="openstack/dnsmasq-dns-5b868669f-2w6g8" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.015174 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5e307ec2-8d13-4c97-8a45-26f4f2fa3962-ovsdbserver-sb\") pod \"dnsmasq-dns-5b868669f-2w6g8\" (UID: \"5e307ec2-8d13-4c97-8a45-26f4f2fa3962\") " pod="openstack/dnsmasq-dns-5b868669f-2w6g8" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.104037 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-qwbz9"] Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.105386 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-qwbz9" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.115394 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a1b7c2fb-e839-4698-8319-3f8eae6e46d6-config\") pod \"neutron-db-sync-qwbz9\" (UID: \"a1b7c2fb-e839-4698-8319-3f8eae6e46d6\") " pod="openstack/neutron-db-sync-qwbz9" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.115489 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1b7c2fb-e839-4698-8319-3f8eae6e46d6-combined-ca-bundle\") pod \"neutron-db-sync-qwbz9\" (UID: \"a1b7c2fb-e839-4698-8319-3f8eae6e46d6\") " pod="openstack/neutron-db-sync-qwbz9" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.115561 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zsjgx\" (UniqueName: \"kubernetes.io/projected/a1b7c2fb-e839-4698-8319-3f8eae6e46d6-kube-api-access-zsjgx\") pod \"neutron-db-sync-qwbz9\" (UID: \"a1b7c2fb-e839-4698-8319-3f8eae6e46d6\") " pod="openstack/neutron-db-sync-qwbz9" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.137316 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.137702 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.138010 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-lk4g6" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.174301 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kcnrm\" (UniqueName: \"kubernetes.io/projected/5e307ec2-8d13-4c97-8a45-26f4f2fa3962-kube-api-access-kcnrm\") pod \"dnsmasq-dns-5b868669f-2w6g8\" (UID: \"5e307ec2-8d13-4c97-8a45-26f4f2fa3962\") " pod="openstack/dnsmasq-dns-5b868669f-2w6g8" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.191481 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-qwbz9"] Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.218792 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a1b7c2fb-e839-4698-8319-3f8eae6e46d6-config\") pod \"neutron-db-sync-qwbz9\" (UID: 
\"a1b7c2fb-e839-4698-8319-3f8eae6e46d6\") " pod="openstack/neutron-db-sync-qwbz9" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.218848 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1b7c2fb-e839-4698-8319-3f8eae6e46d6-combined-ca-bundle\") pod \"neutron-db-sync-qwbz9\" (UID: \"a1b7c2fb-e839-4698-8319-3f8eae6e46d6\") " pod="openstack/neutron-db-sync-qwbz9" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.218899 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zsjgx\" (UniqueName: \"kubernetes.io/projected/a1b7c2fb-e839-4698-8319-3f8eae6e46d6-kube-api-access-zsjgx\") pod \"neutron-db-sync-qwbz9\" (UID: \"a1b7c2fb-e839-4698-8319-3f8eae6e46d6\") " pod="openstack/neutron-db-sync-qwbz9" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.228332 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1b7c2fb-e839-4698-8319-3f8eae6e46d6-combined-ca-bundle\") pod \"neutron-db-sync-qwbz9\" (UID: \"a1b7c2fb-e839-4698-8319-3f8eae6e46d6\") " pod="openstack/neutron-db-sync-qwbz9" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.252954 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/a1b7c2fb-e839-4698-8319-3f8eae6e46d6-config\") pod \"neutron-db-sync-qwbz9\" (UID: \"a1b7c2fb-e839-4698-8319-3f8eae6e46d6\") " pod="openstack/neutron-db-sync-qwbz9" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.295399 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-qqg9g"] Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.297629 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-dfdr5" podUID="9d0cf9db-537a-41b4-bb17-d5f1a42f84e7" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.113:5353: i/o timeout" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.321815 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-qqg9g" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.340073 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.340257 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-rb52r" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.340376 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.392254 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zsjgx\" (UniqueName: \"kubernetes.io/projected/a1b7c2fb-e839-4698-8319-3f8eae6e46d6-kube-api-access-zsjgx\") pod \"neutron-db-sync-qwbz9\" (UID: \"a1b7c2fb-e839-4698-8319-3f8eae6e46d6\") " pod="openstack/neutron-db-sync-qwbz9" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.410799 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b868669f-2w6g8" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.432653 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-574f45d7f7-l8jvd"] Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.434131 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-574f45d7f7-l8jvd" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.449537 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-hwwrh" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.462666 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-qwbz9" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.466595 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-qqg9g"] Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.482881 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.483137 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.483310 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.497957 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.508417 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.521126 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.521362 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.533227 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88-scripts\") pod \"horizon-574f45d7f7-l8jvd\" (UID: \"b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88\") " pod="openstack/horizon-574f45d7f7-l8jvd" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.533296 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88-horizon-secret-key\") pod \"horizon-574f45d7f7-l8jvd\" (UID: \"b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88\") " pod="openstack/horizon-574f45d7f7-l8jvd" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.533321 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/047a7c95-c4e3-46fa-8b1b-2a351992493e-scripts\") pod \"cinder-db-sync-qqg9g\" (UID: \"047a7c95-c4e3-46fa-8b1b-2a351992493e\") " pod="openstack/cinder-db-sync-qqg9g" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.533352 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwc9s\" (UniqueName: \"kubernetes.io/projected/b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88-kube-api-access-zwc9s\") pod \"horizon-574f45d7f7-l8jvd\" (UID: \"b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88\") " pod="openstack/horizon-574f45d7f7-l8jvd" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.533378 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/047a7c95-c4e3-46fa-8b1b-2a351992493e-etc-machine-id\") pod \"cinder-db-sync-qqg9g\" (UID: \"047a7c95-c4e3-46fa-8b1b-2a351992493e\") " pod="openstack/cinder-db-sync-qqg9g" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.533392 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/047a7c95-c4e3-46fa-8b1b-2a351992493e-config-data\") pod \"cinder-db-sync-qqg9g\" (UID: \"047a7c95-c4e3-46fa-8b1b-2a351992493e\") " pod="openstack/cinder-db-sync-qqg9g" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.533407 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/047a7c95-c4e3-46fa-8b1b-2a351992493e-db-sync-config-data\") pod \"cinder-db-sync-qqg9g\" (UID: \"047a7c95-c4e3-46fa-8b1b-2a351992493e\") " pod="openstack/cinder-db-sync-qqg9g" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.533445 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/047a7c95-c4e3-46fa-8b1b-2a351992493e-combined-ca-bundle\") pod \"cinder-db-sync-qqg9g\" (UID: \"047a7c95-c4e3-46fa-8b1b-2a351992493e\") " pod="openstack/cinder-db-sync-qqg9g" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.533461 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h9njg\" (UniqueName: \"kubernetes.io/projected/047a7c95-c4e3-46fa-8b1b-2a351992493e-kube-api-access-h9njg\") pod \"cinder-db-sync-qqg9g\" (UID: \"047a7c95-c4e3-46fa-8b1b-2a351992493e\") " pod="openstack/cinder-db-sync-qqg9g" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.533485 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88-logs\") pod \"horizon-574f45d7f7-l8jvd\" (UID: \"b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88\") " pod="openstack/horizon-574f45d7f7-l8jvd" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.533508 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88-config-data\") pod \"horizon-574f45d7f7-l8jvd\" (UID: \"b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88\") " pod="openstack/horizon-574f45d7f7-l8jvd" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.563946 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-574f45d7f7-l8jvd"] Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.579617 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.645128 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88-config-data\") pod \"horizon-574f45d7f7-l8jvd\" (UID: \"b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88\") " pod="openstack/horizon-574f45d7f7-l8jvd" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.645191 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5fe3155-d737-4a7e-9596-a6080cae4b27-config-data\") pod \"ceilometer-0\" (UID: 
\"b5fe3155-d737-4a7e-9596-a6080cae4b27\") " pod="openstack/ceilometer-0" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.645294 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b5fe3155-d737-4a7e-9596-a6080cae4b27-log-httpd\") pod \"ceilometer-0\" (UID: \"b5fe3155-d737-4a7e-9596-a6080cae4b27\") " pod="openstack/ceilometer-0" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.645406 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b5fe3155-d737-4a7e-9596-a6080cae4b27-run-httpd\") pod \"ceilometer-0\" (UID: \"b5fe3155-d737-4a7e-9596-a6080cae4b27\") " pod="openstack/ceilometer-0" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.645447 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88-scripts\") pod \"horizon-574f45d7f7-l8jvd\" (UID: \"b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88\") " pod="openstack/horizon-574f45d7f7-l8jvd" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.645568 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b5fe3155-d737-4a7e-9596-a6080cae4b27-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b5fe3155-d737-4a7e-9596-a6080cae4b27\") " pod="openstack/ceilometer-0" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.645666 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b5fe3155-d737-4a7e-9596-a6080cae4b27-scripts\") pod \"ceilometer-0\" (UID: \"b5fe3155-d737-4a7e-9596-a6080cae4b27\") " pod="openstack/ceilometer-0" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.645702 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2m6c\" (UniqueName: \"kubernetes.io/projected/b5fe3155-d737-4a7e-9596-a6080cae4b27-kube-api-access-d2m6c\") pod \"ceilometer-0\" (UID: \"b5fe3155-d737-4a7e-9596-a6080cae4b27\") " pod="openstack/ceilometer-0" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.645823 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88-horizon-secret-key\") pod \"horizon-574f45d7f7-l8jvd\" (UID: \"b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88\") " pod="openstack/horizon-574f45d7f7-l8jvd" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.645863 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/047a7c95-c4e3-46fa-8b1b-2a351992493e-scripts\") pod \"cinder-db-sync-qqg9g\" (UID: \"047a7c95-c4e3-46fa-8b1b-2a351992493e\") " pod="openstack/cinder-db-sync-qqg9g" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.646016 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwc9s\" (UniqueName: \"kubernetes.io/projected/b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88-kube-api-access-zwc9s\") pod \"horizon-574f45d7f7-l8jvd\" (UID: \"b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88\") " pod="openstack/horizon-574f45d7f7-l8jvd" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.646138 4576 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/047a7c95-c4e3-46fa-8b1b-2a351992493e-etc-machine-id\") pod \"cinder-db-sync-qqg9g\" (UID: \"047a7c95-c4e3-46fa-8b1b-2a351992493e\") " pod="openstack/cinder-db-sync-qqg9g" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.646158 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/047a7c95-c4e3-46fa-8b1b-2a351992493e-config-data\") pod \"cinder-db-sync-qqg9g\" (UID: \"047a7c95-c4e3-46fa-8b1b-2a351992493e\") " pod="openstack/cinder-db-sync-qqg9g" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.646326 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/047a7c95-c4e3-46fa-8b1b-2a351992493e-db-sync-config-data\") pod \"cinder-db-sync-qqg9g\" (UID: \"047a7c95-c4e3-46fa-8b1b-2a351992493e\") " pod="openstack/cinder-db-sync-qqg9g" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.646476 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5fe3155-d737-4a7e-9596-a6080cae4b27-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b5fe3155-d737-4a7e-9596-a6080cae4b27\") " pod="openstack/ceilometer-0" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.669909 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88-config-data\") pod \"horizon-574f45d7f7-l8jvd\" (UID: \"b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88\") " pod="openstack/horizon-574f45d7f7-l8jvd" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.670603 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/047a7c95-c4e3-46fa-8b1b-2a351992493e-combined-ca-bundle\") pod \"cinder-db-sync-qqg9g\" (UID: \"047a7c95-c4e3-46fa-8b1b-2a351992493e\") " pod="openstack/cinder-db-sync-qqg9g" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.682647 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h9njg\" (UniqueName: \"kubernetes.io/projected/047a7c95-c4e3-46fa-8b1b-2a351992493e-kube-api-access-h9njg\") pod \"cinder-db-sync-qqg9g\" (UID: \"047a7c95-c4e3-46fa-8b1b-2a351992493e\") " pod="openstack/cinder-db-sync-qqg9g" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.685933 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88-logs\") pod \"horizon-574f45d7f7-l8jvd\" (UID: \"b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88\") " pod="openstack/horizon-574f45d7f7-l8jvd" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.686578 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88-logs\") pod \"horizon-574f45d7f7-l8jvd\" (UID: \"b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88\") " pod="openstack/horizon-574f45d7f7-l8jvd" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.671469 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88-scripts\") pod \"horizon-574f45d7f7-l8jvd\" (UID: 
\"b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88\") " pod="openstack/horizon-574f45d7f7-l8jvd" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.671832 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/047a7c95-c4e3-46fa-8b1b-2a351992493e-etc-machine-id\") pod \"cinder-db-sync-qqg9g\" (UID: \"047a7c95-c4e3-46fa-8b1b-2a351992493e\") " pod="openstack/cinder-db-sync-qqg9g" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.705280 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/047a7c95-c4e3-46fa-8b1b-2a351992493e-db-sync-config-data\") pod \"cinder-db-sync-qqg9g\" (UID: \"047a7c95-c4e3-46fa-8b1b-2a351992493e\") " pod="openstack/cinder-db-sync-qqg9g" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.706925 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/047a7c95-c4e3-46fa-8b1b-2a351992493e-combined-ca-bundle\") pod \"cinder-db-sync-qqg9g\" (UID: \"047a7c95-c4e3-46fa-8b1b-2a351992493e\") " pod="openstack/cinder-db-sync-qqg9g" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.749551 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/047a7c95-c4e3-46fa-8b1b-2a351992493e-scripts\") pod \"cinder-db-sync-qqg9g\" (UID: \"047a7c95-c4e3-46fa-8b1b-2a351992493e\") " pod="openstack/cinder-db-sync-qqg9g" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.769616 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88-horizon-secret-key\") pod \"horizon-574f45d7f7-l8jvd\" (UID: \"b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88\") " pod="openstack/horizon-574f45d7f7-l8jvd" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.772280 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/047a7c95-c4e3-46fa-8b1b-2a351992493e-config-data\") pod \"cinder-db-sync-qqg9g\" (UID: \"047a7c95-c4e3-46fa-8b1b-2a351992493e\") " pod="openstack/cinder-db-sync-qqg9g" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.863401 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h9njg\" (UniqueName: \"kubernetes.io/projected/047a7c95-c4e3-46fa-8b1b-2a351992493e-kube-api-access-h9njg\") pod \"cinder-db-sync-qqg9g\" (UID: \"047a7c95-c4e3-46fa-8b1b-2a351992493e\") " pod="openstack/cinder-db-sync-qqg9g" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.863751 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwc9s\" (UniqueName: \"kubernetes.io/projected/b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88-kube-api-access-zwc9s\") pod \"horizon-574f45d7f7-l8jvd\" (UID: \"b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88\") " pod="openstack/horizon-574f45d7f7-l8jvd" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.868377 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2m6c\" (UniqueName: \"kubernetes.io/projected/b5fe3155-d737-4a7e-9596-a6080cae4b27-kube-api-access-d2m6c\") pod \"ceilometer-0\" (UID: \"b5fe3155-d737-4a7e-9596-a6080cae4b27\") " pod="openstack/ceilometer-0" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.868488 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5fe3155-d737-4a7e-9596-a6080cae4b27-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b5fe3155-d737-4a7e-9596-a6080cae4b27\") " pod="openstack/ceilometer-0" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.868573 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5fe3155-d737-4a7e-9596-a6080cae4b27-config-data\") pod \"ceilometer-0\" (UID: \"b5fe3155-d737-4a7e-9596-a6080cae4b27\") " pod="openstack/ceilometer-0" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.868601 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b5fe3155-d737-4a7e-9596-a6080cae4b27-log-httpd\") pod \"ceilometer-0\" (UID: \"b5fe3155-d737-4a7e-9596-a6080cae4b27\") " pod="openstack/ceilometer-0" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.868641 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b5fe3155-d737-4a7e-9596-a6080cae4b27-run-httpd\") pod \"ceilometer-0\" (UID: \"b5fe3155-d737-4a7e-9596-a6080cae4b27\") " pod="openstack/ceilometer-0" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.874992 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b5fe3155-d737-4a7e-9596-a6080cae4b27-log-httpd\") pod \"ceilometer-0\" (UID: \"b5fe3155-d737-4a7e-9596-a6080cae4b27\") " pod="openstack/ceilometer-0" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.878816 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b5fe3155-d737-4a7e-9596-a6080cae4b27-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b5fe3155-d737-4a7e-9596-a6080cae4b27\") " pod="openstack/ceilometer-0" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.878891 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b5fe3155-d737-4a7e-9596-a6080cae4b27-scripts\") pod \"ceilometer-0\" (UID: \"b5fe3155-d737-4a7e-9596-a6080cae4b27\") " pod="openstack/ceilometer-0" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.897117 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b5fe3155-d737-4a7e-9596-a6080cae4b27-run-httpd\") pod \"ceilometer-0\" (UID: \"b5fe3155-d737-4a7e-9596-a6080cae4b27\") " pod="openstack/ceilometer-0" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.927047 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b5fe3155-d737-4a7e-9596-a6080cae4b27-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b5fe3155-d737-4a7e-9596-a6080cae4b27\") " pod="openstack/ceilometer-0" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.928056 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b5fe3155-d737-4a7e-9596-a6080cae4b27-scripts\") pod \"ceilometer-0\" (UID: \"b5fe3155-d737-4a7e-9596-a6080cae4b27\") " pod="openstack/ceilometer-0" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.931406 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/b5fe3155-d737-4a7e-9596-a6080cae4b27-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b5fe3155-d737-4a7e-9596-a6080cae4b27\") " pod="openstack/ceilometer-0" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.937130 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-988np"] Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.938930 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-988np" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.940676 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5fe3155-d737-4a7e-9596-a6080cae4b27-config-data\") pod \"ceilometer-0\" (UID: \"b5fe3155-d737-4a7e-9596-a6080cae4b27\") " pod="openstack/ceilometer-0" Dec 03 09:11:31 crc kubenswrapper[4576]: I1203 09:11:31.987877 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-qqg9g" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.004511 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-988np"] Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.015933 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.025268 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-s6zfp" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.080191 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2m6c\" (UniqueName: \"kubernetes.io/projected/b5fe3155-d737-4a7e-9596-a6080cae4b27-kube-api-access-d2m6c\") pod \"ceilometer-0\" (UID: \"b5fe3155-d737-4a7e-9596-a6080cae4b27\") " pod="openstack/ceilometer-0" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.081043 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-79d544fc65-l8nxr"] Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.108737 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-79d544fc65-l8nxr" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.127044 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b868669f-2w6g8"] Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.144809 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-79d544fc65-l8nxr"] Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.146375 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-574f45d7f7-l8jvd" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.187199 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.213734 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-cf78879c9-kvbcw"] Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.217731 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nfqn6\" (UniqueName: \"kubernetes.io/projected/437286d3-1147-43ef-945f-8612d1610427-kube-api-access-nfqn6\") pod \"barbican-db-sync-988np\" (UID: \"437286d3-1147-43ef-945f-8612d1610427\") " pod="openstack/barbican-db-sync-988np" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.217773 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dd7fc7a1-d409-4c09-97a6-dd063b1e7647-logs\") pod \"horizon-79d544fc65-l8nxr\" (UID: \"dd7fc7a1-d409-4c09-97a6-dd063b1e7647\") " pod="openstack/horizon-79d544fc65-l8nxr" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.217830 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/437286d3-1147-43ef-945f-8612d1610427-combined-ca-bundle\") pod \"barbican-db-sync-988np\" (UID: \"437286d3-1147-43ef-945f-8612d1610427\") " pod="openstack/barbican-db-sync-988np" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.217858 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dd7fc7a1-d409-4c09-97a6-dd063b1e7647-scripts\") pod \"horizon-79d544fc65-l8nxr\" (UID: \"dd7fc7a1-d409-4c09-97a6-dd063b1e7647\") " pod="openstack/horizon-79d544fc65-l8nxr" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.217899 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bxk8d\" (UniqueName: \"kubernetes.io/projected/dd7fc7a1-d409-4c09-97a6-dd063b1e7647-kube-api-access-bxk8d\") pod \"horizon-79d544fc65-l8nxr\" (UID: \"dd7fc7a1-d409-4c09-97a6-dd063b1e7647\") " pod="openstack/horizon-79d544fc65-l8nxr" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.217916 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/437286d3-1147-43ef-945f-8612d1610427-db-sync-config-data\") pod \"barbican-db-sync-988np\" (UID: \"437286d3-1147-43ef-945f-8612d1610427\") " pod="openstack/barbican-db-sync-988np" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.217945 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/dd7fc7a1-d409-4c09-97a6-dd063b1e7647-horizon-secret-key\") pod \"horizon-79d544fc65-l8nxr\" (UID: \"dd7fc7a1-d409-4c09-97a6-dd063b1e7647\") " pod="openstack/horizon-79d544fc65-l8nxr" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.217988 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/dd7fc7a1-d409-4c09-97a6-dd063b1e7647-config-data\") pod \"horizon-79d544fc65-l8nxr\" (UID: \"dd7fc7a1-d409-4c09-97a6-dd063b1e7647\") " pod="openstack/horizon-79d544fc65-l8nxr" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.230825 4576 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/placement-db-sync-2njh4"] Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.231113 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cf78879c9-kvbcw" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.241337 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-2njh4" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.274052 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cf78879c9-kvbcw"] Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.283424 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-2njh4"] Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.286372 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.286437 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-s56sh" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.286518 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.319044 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/dd7fc7a1-d409-4c09-97a6-dd063b1e7647-horizon-secret-key\") pod \"horizon-79d544fc65-l8nxr\" (UID: \"dd7fc7a1-d409-4c09-97a6-dd063b1e7647\") " pod="openstack/horizon-79d544fc65-l8nxr" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.319148 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0af2ec10-0960-41c2-bbb9-aeffd38aa679-ovsdbserver-sb\") pod \"dnsmasq-dns-cf78879c9-kvbcw\" (UID: \"0af2ec10-0960-41c2-bbb9-aeffd38aa679\") " pod="openstack/dnsmasq-dns-cf78879c9-kvbcw" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.319178 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0af2ec10-0960-41c2-bbb9-aeffd38aa679-ovsdbserver-nb\") pod \"dnsmasq-dns-cf78879c9-kvbcw\" (UID: \"0af2ec10-0960-41c2-bbb9-aeffd38aa679\") " pod="openstack/dnsmasq-dns-cf78879c9-kvbcw" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.319246 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/dd7fc7a1-d409-4c09-97a6-dd063b1e7647-config-data\") pod \"horizon-79d544fc65-l8nxr\" (UID: \"dd7fc7a1-d409-4c09-97a6-dd063b1e7647\") " pod="openstack/horizon-79d544fc65-l8nxr" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.319303 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0af2ec10-0960-41c2-bbb9-aeffd38aa679-dns-swift-storage-0\") pod \"dnsmasq-dns-cf78879c9-kvbcw\" (UID: \"0af2ec10-0960-41c2-bbb9-aeffd38aa679\") " pod="openstack/dnsmasq-dns-cf78879c9-kvbcw" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.319360 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nfqn6\" (UniqueName: \"kubernetes.io/projected/437286d3-1147-43ef-945f-8612d1610427-kube-api-access-nfqn6\") pod \"barbican-db-sync-988np\" (UID: 
\"437286d3-1147-43ef-945f-8612d1610427\") " pod="openstack/barbican-db-sync-988np" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.319390 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dd7fc7a1-d409-4c09-97a6-dd063b1e7647-logs\") pod \"horizon-79d544fc65-l8nxr\" (UID: \"dd7fc7a1-d409-4c09-97a6-dd063b1e7647\") " pod="openstack/horizon-79d544fc65-l8nxr" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.319428 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cdef871a-e0bf-42eb-b9d5-bcf0777fbec4-logs\") pod \"placement-db-sync-2njh4\" (UID: \"cdef871a-e0bf-42eb-b9d5-bcf0777fbec4\") " pod="openstack/placement-db-sync-2njh4" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.319504 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/437286d3-1147-43ef-945f-8612d1610427-combined-ca-bundle\") pod \"barbican-db-sync-988np\" (UID: \"437286d3-1147-43ef-945f-8612d1610427\") " pod="openstack/barbican-db-sync-988np" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.319567 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cdef871a-e0bf-42eb-b9d5-bcf0777fbec4-scripts\") pod \"placement-db-sync-2njh4\" (UID: \"cdef871a-e0bf-42eb-b9d5-bcf0777fbec4\") " pod="openstack/placement-db-sync-2njh4" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.319595 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2phz\" (UniqueName: \"kubernetes.io/projected/cdef871a-e0bf-42eb-b9d5-bcf0777fbec4-kube-api-access-q2phz\") pod \"placement-db-sync-2njh4\" (UID: \"cdef871a-e0bf-42eb-b9d5-bcf0777fbec4\") " pod="openstack/placement-db-sync-2njh4" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.319627 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dd7fc7a1-d409-4c09-97a6-dd063b1e7647-scripts\") pod \"horizon-79d544fc65-l8nxr\" (UID: \"dd7fc7a1-d409-4c09-97a6-dd063b1e7647\") " pod="openstack/horizon-79d544fc65-l8nxr" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.319673 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5k6g6\" (UniqueName: \"kubernetes.io/projected/0af2ec10-0960-41c2-bbb9-aeffd38aa679-kube-api-access-5k6g6\") pod \"dnsmasq-dns-cf78879c9-kvbcw\" (UID: \"0af2ec10-0960-41c2-bbb9-aeffd38aa679\") " pod="openstack/dnsmasq-dns-cf78879c9-kvbcw" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.319733 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0af2ec10-0960-41c2-bbb9-aeffd38aa679-config\") pod \"dnsmasq-dns-cf78879c9-kvbcw\" (UID: \"0af2ec10-0960-41c2-bbb9-aeffd38aa679\") " pod="openstack/dnsmasq-dns-cf78879c9-kvbcw" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.319763 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cdef871a-e0bf-42eb-b9d5-bcf0777fbec4-config-data\") pod \"placement-db-sync-2njh4\" (UID: \"cdef871a-e0bf-42eb-b9d5-bcf0777fbec4\") " 
pod="openstack/placement-db-sync-2njh4" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.319787 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0af2ec10-0960-41c2-bbb9-aeffd38aa679-dns-svc\") pod \"dnsmasq-dns-cf78879c9-kvbcw\" (UID: \"0af2ec10-0960-41c2-bbb9-aeffd38aa679\") " pod="openstack/dnsmasq-dns-cf78879c9-kvbcw" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.319814 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdef871a-e0bf-42eb-b9d5-bcf0777fbec4-combined-ca-bundle\") pod \"placement-db-sync-2njh4\" (UID: \"cdef871a-e0bf-42eb-b9d5-bcf0777fbec4\") " pod="openstack/placement-db-sync-2njh4" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.319841 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bxk8d\" (UniqueName: \"kubernetes.io/projected/dd7fc7a1-d409-4c09-97a6-dd063b1e7647-kube-api-access-bxk8d\") pod \"horizon-79d544fc65-l8nxr\" (UID: \"dd7fc7a1-d409-4c09-97a6-dd063b1e7647\") " pod="openstack/horizon-79d544fc65-l8nxr" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.319865 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/437286d3-1147-43ef-945f-8612d1610427-db-sync-config-data\") pod \"barbican-db-sync-988np\" (UID: \"437286d3-1147-43ef-945f-8612d1610427\") " pod="openstack/barbican-db-sync-988np" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.325317 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/437286d3-1147-43ef-945f-8612d1610427-db-sync-config-data\") pod \"barbican-db-sync-988np\" (UID: \"437286d3-1147-43ef-945f-8612d1610427\") " pod="openstack/barbican-db-sync-988np" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.325883 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dd7fc7a1-d409-4c09-97a6-dd063b1e7647-scripts\") pod \"horizon-79d544fc65-l8nxr\" (UID: \"dd7fc7a1-d409-4c09-97a6-dd063b1e7647\") " pod="openstack/horizon-79d544fc65-l8nxr" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.330806 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/dd7fc7a1-d409-4c09-97a6-dd063b1e7647-config-data\") pod \"horizon-79d544fc65-l8nxr\" (UID: \"dd7fc7a1-d409-4c09-97a6-dd063b1e7647\") " pod="openstack/horizon-79d544fc65-l8nxr" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.336718 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dd7fc7a1-d409-4c09-97a6-dd063b1e7647-logs\") pod \"horizon-79d544fc65-l8nxr\" (UID: \"dd7fc7a1-d409-4c09-97a6-dd063b1e7647\") " pod="openstack/horizon-79d544fc65-l8nxr" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.365668 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/dd7fc7a1-d409-4c09-97a6-dd063b1e7647-horizon-secret-key\") pod \"horizon-79d544fc65-l8nxr\" (UID: \"dd7fc7a1-d409-4c09-97a6-dd063b1e7647\") " pod="openstack/horizon-79d544fc65-l8nxr" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.381271 4576 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-bxk8d\" (UniqueName: \"kubernetes.io/projected/dd7fc7a1-d409-4c09-97a6-dd063b1e7647-kube-api-access-bxk8d\") pod \"horizon-79d544fc65-l8nxr\" (UID: \"dd7fc7a1-d409-4c09-97a6-dd063b1e7647\") " pod="openstack/horizon-79d544fc65-l8nxr" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.381293 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/437286d3-1147-43ef-945f-8612d1610427-combined-ca-bundle\") pod \"barbican-db-sync-988np\" (UID: \"437286d3-1147-43ef-945f-8612d1610427\") " pod="openstack/barbican-db-sync-988np" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.385699 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nfqn6\" (UniqueName: \"kubernetes.io/projected/437286d3-1147-43ef-945f-8612d1610427-kube-api-access-nfqn6\") pod \"barbican-db-sync-988np\" (UID: \"437286d3-1147-43ef-945f-8612d1610427\") " pod="openstack/barbican-db-sync-988np" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.400050 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-gffzv"] Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.428943 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cdef871a-e0bf-42eb-b9d5-bcf0777fbec4-scripts\") pod \"placement-db-sync-2njh4\" (UID: \"cdef871a-e0bf-42eb-b9d5-bcf0777fbec4\") " pod="openstack/placement-db-sync-2njh4" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.428990 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2phz\" (UniqueName: \"kubernetes.io/projected/cdef871a-e0bf-42eb-b9d5-bcf0777fbec4-kube-api-access-q2phz\") pod \"placement-db-sync-2njh4\" (UID: \"cdef871a-e0bf-42eb-b9d5-bcf0777fbec4\") " pod="openstack/placement-db-sync-2njh4" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.429045 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5k6g6\" (UniqueName: \"kubernetes.io/projected/0af2ec10-0960-41c2-bbb9-aeffd38aa679-kube-api-access-5k6g6\") pod \"dnsmasq-dns-cf78879c9-kvbcw\" (UID: \"0af2ec10-0960-41c2-bbb9-aeffd38aa679\") " pod="openstack/dnsmasq-dns-cf78879c9-kvbcw" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.429091 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0af2ec10-0960-41c2-bbb9-aeffd38aa679-config\") pod \"dnsmasq-dns-cf78879c9-kvbcw\" (UID: \"0af2ec10-0960-41c2-bbb9-aeffd38aa679\") " pod="openstack/dnsmasq-dns-cf78879c9-kvbcw" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.429110 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cdef871a-e0bf-42eb-b9d5-bcf0777fbec4-config-data\") pod \"placement-db-sync-2njh4\" (UID: \"cdef871a-e0bf-42eb-b9d5-bcf0777fbec4\") " pod="openstack/placement-db-sync-2njh4" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.429125 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0af2ec10-0960-41c2-bbb9-aeffd38aa679-dns-svc\") pod \"dnsmasq-dns-cf78879c9-kvbcw\" (UID: \"0af2ec10-0960-41c2-bbb9-aeffd38aa679\") " pod="openstack/dnsmasq-dns-cf78879c9-kvbcw" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.429144 4576 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdef871a-e0bf-42eb-b9d5-bcf0777fbec4-combined-ca-bundle\") pod \"placement-db-sync-2njh4\" (UID: \"cdef871a-e0bf-42eb-b9d5-bcf0777fbec4\") " pod="openstack/placement-db-sync-2njh4" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.429240 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0af2ec10-0960-41c2-bbb9-aeffd38aa679-ovsdbserver-sb\") pod \"dnsmasq-dns-cf78879c9-kvbcw\" (UID: \"0af2ec10-0960-41c2-bbb9-aeffd38aa679\") " pod="openstack/dnsmasq-dns-cf78879c9-kvbcw" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.429258 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0af2ec10-0960-41c2-bbb9-aeffd38aa679-ovsdbserver-nb\") pod \"dnsmasq-dns-cf78879c9-kvbcw\" (UID: \"0af2ec10-0960-41c2-bbb9-aeffd38aa679\") " pod="openstack/dnsmasq-dns-cf78879c9-kvbcw" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.429337 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0af2ec10-0960-41c2-bbb9-aeffd38aa679-dns-swift-storage-0\") pod \"dnsmasq-dns-cf78879c9-kvbcw\" (UID: \"0af2ec10-0960-41c2-bbb9-aeffd38aa679\") " pod="openstack/dnsmasq-dns-cf78879c9-kvbcw" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.429402 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cdef871a-e0bf-42eb-b9d5-bcf0777fbec4-logs\") pod \"placement-db-sync-2njh4\" (UID: \"cdef871a-e0bf-42eb-b9d5-bcf0777fbec4\") " pod="openstack/placement-db-sync-2njh4" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.429917 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cdef871a-e0bf-42eb-b9d5-bcf0777fbec4-logs\") pod \"placement-db-sync-2njh4\" (UID: \"cdef871a-e0bf-42eb-b9d5-bcf0777fbec4\") " pod="openstack/placement-db-sync-2njh4" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.431554 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0af2ec10-0960-41c2-bbb9-aeffd38aa679-dns-svc\") pod \"dnsmasq-dns-cf78879c9-kvbcw\" (UID: \"0af2ec10-0960-41c2-bbb9-aeffd38aa679\") " pod="openstack/dnsmasq-dns-cf78879c9-kvbcw" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.432553 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0af2ec10-0960-41c2-bbb9-aeffd38aa679-config\") pod \"dnsmasq-dns-cf78879c9-kvbcw\" (UID: \"0af2ec10-0960-41c2-bbb9-aeffd38aa679\") " pod="openstack/dnsmasq-dns-cf78879c9-kvbcw" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.434033 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0af2ec10-0960-41c2-bbb9-aeffd38aa679-ovsdbserver-sb\") pod \"dnsmasq-dns-cf78879c9-kvbcw\" (UID: \"0af2ec10-0960-41c2-bbb9-aeffd38aa679\") " pod="openstack/dnsmasq-dns-cf78879c9-kvbcw" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.434573 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0af2ec10-0960-41c2-bbb9-aeffd38aa679-ovsdbserver-nb\") 
pod \"dnsmasq-dns-cf78879c9-kvbcw\" (UID: \"0af2ec10-0960-41c2-bbb9-aeffd38aa679\") " pod="openstack/dnsmasq-dns-cf78879c9-kvbcw" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.435054 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0af2ec10-0960-41c2-bbb9-aeffd38aa679-dns-swift-storage-0\") pod \"dnsmasq-dns-cf78879c9-kvbcw\" (UID: \"0af2ec10-0960-41c2-bbb9-aeffd38aa679\") " pod="openstack/dnsmasq-dns-cf78879c9-kvbcw" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.470479 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-988np" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.472575 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdef871a-e0bf-42eb-b9d5-bcf0777fbec4-combined-ca-bundle\") pod \"placement-db-sync-2njh4\" (UID: \"cdef871a-e0bf-42eb-b9d5-bcf0777fbec4\") " pod="openstack/placement-db-sync-2njh4" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.477400 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cdef871a-e0bf-42eb-b9d5-bcf0777fbec4-scripts\") pod \"placement-db-sync-2njh4\" (UID: \"cdef871a-e0bf-42eb-b9d5-bcf0777fbec4\") " pod="openstack/placement-db-sync-2njh4" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.483619 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2phz\" (UniqueName: \"kubernetes.io/projected/cdef871a-e0bf-42eb-b9d5-bcf0777fbec4-kube-api-access-q2phz\") pod \"placement-db-sync-2njh4\" (UID: \"cdef871a-e0bf-42eb-b9d5-bcf0777fbec4\") " pod="openstack/placement-db-sync-2njh4" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.484223 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5k6g6\" (UniqueName: \"kubernetes.io/projected/0af2ec10-0960-41c2-bbb9-aeffd38aa679-kube-api-access-5k6g6\") pod \"dnsmasq-dns-cf78879c9-kvbcw\" (UID: \"0af2ec10-0960-41c2-bbb9-aeffd38aa679\") " pod="openstack/dnsmasq-dns-cf78879c9-kvbcw" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.503231 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cdef871a-e0bf-42eb-b9d5-bcf0777fbec4-config-data\") pod \"placement-db-sync-2njh4\" (UID: \"cdef871a-e0bf-42eb-b9d5-bcf0777fbec4\") " pod="openstack/placement-db-sync-2njh4" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.557782 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-79d544fc65-l8nxr" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.601599 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b868669f-2w6g8"] Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.726459 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cf78879c9-kvbcw" Dec 03 09:11:32 crc kubenswrapper[4576]: I1203 09:11:32.770844 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-2njh4" Dec 03 09:11:33 crc kubenswrapper[4576]: I1203 09:11:33.044809 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-qwbz9"] Dec 03 09:11:33 crc kubenswrapper[4576]: I1203 09:11:33.244493 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-574f45d7f7-l8jvd"] Dec 03 09:11:33 crc kubenswrapper[4576]: I1203 09:11:33.416226 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-qqg9g"] Dec 03 09:11:33 crc kubenswrapper[4576]: I1203 09:11:33.454316 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:11:33 crc kubenswrapper[4576]: I1203 09:11:33.548162 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b868669f-2w6g8" event={"ID":"5e307ec2-8d13-4c97-8a45-26f4f2fa3962","Type":"ContainerStarted","Data":"4c6fe04d600d30422d3d6beeed2b73657782ab073821308acc139ec84fffb195"} Dec 03 09:11:33 crc kubenswrapper[4576]: W1203 09:11:33.554203 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddd7fc7a1_d409_4c09_97a6_dd063b1e7647.slice/crio-779b4f4a0cdf463feac1dc479b4bf98ed2e242f99e8c3df8f5186ed9ed69067f WatchSource:0}: Error finding container 779b4f4a0cdf463feac1dc479b4bf98ed2e242f99e8c3df8f5186ed9ed69067f: Status 404 returned error can't find the container with id 779b4f4a0cdf463feac1dc479b4bf98ed2e242f99e8c3df8f5186ed9ed69067f Dec 03 09:11:33 crc kubenswrapper[4576]: I1203 09:11:33.554382 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-79d544fc65-l8nxr"] Dec 03 09:11:33 crc kubenswrapper[4576]: I1203 09:11:33.557393 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b5fe3155-d737-4a7e-9596-a6080cae4b27","Type":"ContainerStarted","Data":"7b3e903901080426c7c3f12f79ce1f6eb6bcc782e089b6d983383afe055fb3d3"} Dec 03 09:11:33 crc kubenswrapper[4576]: I1203 09:11:33.578982 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-574f45d7f7-l8jvd" event={"ID":"b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88","Type":"ContainerStarted","Data":"3509987a2a85dfddefacd3a91b99730291a92bbaf17b23e695df8bf541c3b2d8"} Dec 03 09:11:33 crc kubenswrapper[4576]: I1203 09:11:33.580601 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-988np"] Dec 03 09:11:33 crc kubenswrapper[4576]: I1203 09:11:33.595219 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-qwbz9" event={"ID":"a1b7c2fb-e839-4698-8319-3f8eae6e46d6","Type":"ContainerStarted","Data":"0aa870d89d046595377172bc442499c143e5905c5a22bee93c1634f6bc3e5cc1"} Dec 03 09:11:33 crc kubenswrapper[4576]: I1203 09:11:33.597390 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-qqg9g" event={"ID":"047a7c95-c4e3-46fa-8b1b-2a351992493e","Type":"ContainerStarted","Data":"e264ec8476c2c8c9724caf9f0db8d94f54cb9499b0cbfc9a38a01a1108461053"} Dec 03 09:11:33 crc kubenswrapper[4576]: I1203 09:11:33.598927 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-gffzv" event={"ID":"361add1e-7a29-4f4a-9784-1ddfb1cb0de5","Type":"ContainerStarted","Data":"15072a9d4227c664a62c47c79e2a64bc58da1924fe88a6731d2ec90e853e87a6"} Dec 03 09:11:33 crc kubenswrapper[4576]: I1203 09:11:33.845815 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-2njh4"] 
Dec 03 09:11:33 crc kubenswrapper[4576]: I1203 09:11:33.865415 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cf78879c9-kvbcw"] Dec 03 09:11:33 crc kubenswrapper[4576]: W1203 09:11:33.875263 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0af2ec10_0960_41c2_bbb9_aeffd38aa679.slice/crio-60bb78361af744a107b206fb4692c9fa342ab1aad577633fd2b8064f6f608bff WatchSource:0}: Error finding container 60bb78361af744a107b206fb4692c9fa342ab1aad577633fd2b8064f6f608bff: Status 404 returned error can't find the container with id 60bb78361af744a107b206fb4692c9fa342ab1aad577633fd2b8064f6f608bff Dec 03 09:11:34 crc kubenswrapper[4576]: I1203 09:11:34.634373 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-2njh4" event={"ID":"cdef871a-e0bf-42eb-b9d5-bcf0777fbec4","Type":"ContainerStarted","Data":"0a7b7045f4efb8b09ff0a1e38631882bc81491a4e943cb69e7fdbb760bed1cad"} Dec 03 09:11:34 crc kubenswrapper[4576]: I1203 09:11:34.635762 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cf78879c9-kvbcw" event={"ID":"0af2ec10-0960-41c2-bbb9-aeffd38aa679","Type":"ContainerStarted","Data":"60bb78361af744a107b206fb4692c9fa342ab1aad577633fd2b8064f6f608bff"} Dec 03 09:11:34 crc kubenswrapper[4576]: I1203 09:11:34.638553 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-988np" event={"ID":"437286d3-1147-43ef-945f-8612d1610427","Type":"ContainerStarted","Data":"d004bca179190e860d0d1178fc26dce66f7ec5eb8f1a5dbb24451b09daee78e9"} Dec 03 09:11:34 crc kubenswrapper[4576]: I1203 09:11:34.652473 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-qwbz9" event={"ID":"a1b7c2fb-e839-4698-8319-3f8eae6e46d6","Type":"ContainerStarted","Data":"763ab8a21ac3fe408c84d9db83465d682f64fd4ed25ba98019c21823b4135d4a"} Dec 03 09:11:34 crc kubenswrapper[4576]: I1203 09:11:34.658842 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-gffzv" event={"ID":"361add1e-7a29-4f4a-9784-1ddfb1cb0de5","Type":"ContainerStarted","Data":"563ee6a4f03a3609f385466bb3a6548e1faf0a4259df8814f616a82c6971b46a"} Dec 03 09:11:34 crc kubenswrapper[4576]: I1203 09:11:34.686390 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-79d544fc65-l8nxr" event={"ID":"dd7fc7a1-d409-4c09-97a6-dd063b1e7647","Type":"ContainerStarted","Data":"779b4f4a0cdf463feac1dc479b4bf98ed2e242f99e8c3df8f5186ed9ed69067f"} Dec 03 09:11:34 crc kubenswrapper[4576]: I1203 09:11:34.695119 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-qwbz9" podStartSLOduration=4.695100105 podStartE2EDuration="4.695100105s" podCreationTimestamp="2025-12-03 09:11:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:11:34.687263001 +0000 UTC m=+1902.073240015" watchObservedRunningTime="2025-12-03 09:11:34.695100105 +0000 UTC m=+1902.081077089" Dec 03 09:11:34 crc kubenswrapper[4576]: I1203 09:11:34.700245 4576 generic.go:334] "Generic (PLEG): container finished" podID="5e307ec2-8d13-4c97-8a45-26f4f2fa3962" containerID="2f63975aa7a910cf19b4188f6331e3bdf9225c276c221d2181855f85c8b736b4" exitCode=0 Dec 03 09:11:34 crc kubenswrapper[4576]: I1203 09:11:34.700314 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b868669f-2w6g8" 
event={"ID":"5e307ec2-8d13-4c97-8a45-26f4f2fa3962","Type":"ContainerDied","Data":"2f63975aa7a910cf19b4188f6331e3bdf9225c276c221d2181855f85c8b736b4"} Dec 03 09:11:34 crc kubenswrapper[4576]: I1203 09:11:34.721506 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-gffzv" podStartSLOduration=4.721484738 podStartE2EDuration="4.721484738s" podCreationTimestamp="2025-12-03 09:11:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:11:34.71501206 +0000 UTC m=+1902.100989044" watchObservedRunningTime="2025-12-03 09:11:34.721484738 +0000 UTC m=+1902.107461722" Dec 03 09:11:35 crc kubenswrapper[4576]: I1203 09:11:35.372256 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b868669f-2w6g8" Dec 03 09:11:35 crc kubenswrapper[4576]: I1203 09:11:35.476651 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5e307ec2-8d13-4c97-8a45-26f4f2fa3962-ovsdbserver-nb\") pod \"5e307ec2-8d13-4c97-8a45-26f4f2fa3962\" (UID: \"5e307ec2-8d13-4c97-8a45-26f4f2fa3962\") " Dec 03 09:11:35 crc kubenswrapper[4576]: I1203 09:11:35.476800 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e307ec2-8d13-4c97-8a45-26f4f2fa3962-config\") pod \"5e307ec2-8d13-4c97-8a45-26f4f2fa3962\" (UID: \"5e307ec2-8d13-4c97-8a45-26f4f2fa3962\") " Dec 03 09:11:35 crc kubenswrapper[4576]: I1203 09:11:35.476821 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5e307ec2-8d13-4c97-8a45-26f4f2fa3962-dns-svc\") pod \"5e307ec2-8d13-4c97-8a45-26f4f2fa3962\" (UID: \"5e307ec2-8d13-4c97-8a45-26f4f2fa3962\") " Dec 03 09:11:35 crc kubenswrapper[4576]: I1203 09:11:35.476856 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kcnrm\" (UniqueName: \"kubernetes.io/projected/5e307ec2-8d13-4c97-8a45-26f4f2fa3962-kube-api-access-kcnrm\") pod \"5e307ec2-8d13-4c97-8a45-26f4f2fa3962\" (UID: \"5e307ec2-8d13-4c97-8a45-26f4f2fa3962\") " Dec 03 09:11:35 crc kubenswrapper[4576]: I1203 09:11:35.476903 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5e307ec2-8d13-4c97-8a45-26f4f2fa3962-dns-swift-storage-0\") pod \"5e307ec2-8d13-4c97-8a45-26f4f2fa3962\" (UID: \"5e307ec2-8d13-4c97-8a45-26f4f2fa3962\") " Dec 03 09:11:35 crc kubenswrapper[4576]: I1203 09:11:35.476939 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5e307ec2-8d13-4c97-8a45-26f4f2fa3962-ovsdbserver-sb\") pod \"5e307ec2-8d13-4c97-8a45-26f4f2fa3962\" (UID: \"5e307ec2-8d13-4c97-8a45-26f4f2fa3962\") " Dec 03 09:11:35 crc kubenswrapper[4576]: I1203 09:11:35.503598 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e307ec2-8d13-4c97-8a45-26f4f2fa3962-config" (OuterVolumeSpecName: "config") pod "5e307ec2-8d13-4c97-8a45-26f4f2fa3962" (UID: "5e307ec2-8d13-4c97-8a45-26f4f2fa3962"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:11:35 crc kubenswrapper[4576]: I1203 09:11:35.513197 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e307ec2-8d13-4c97-8a45-26f4f2fa3962-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5e307ec2-8d13-4c97-8a45-26f4f2fa3962" (UID: "5e307ec2-8d13-4c97-8a45-26f4f2fa3962"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:11:35 crc kubenswrapper[4576]: I1203 09:11:35.549644 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e307ec2-8d13-4c97-8a45-26f4f2fa3962-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "5e307ec2-8d13-4c97-8a45-26f4f2fa3962" (UID: "5e307ec2-8d13-4c97-8a45-26f4f2fa3962"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:11:35 crc kubenswrapper[4576]: I1203 09:11:35.556662 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e307ec2-8d13-4c97-8a45-26f4f2fa3962-kube-api-access-kcnrm" (OuterVolumeSpecName: "kube-api-access-kcnrm") pod "5e307ec2-8d13-4c97-8a45-26f4f2fa3962" (UID: "5e307ec2-8d13-4c97-8a45-26f4f2fa3962"). InnerVolumeSpecName "kube-api-access-kcnrm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:11:35 crc kubenswrapper[4576]: I1203 09:11:35.583463 4576 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e307ec2-8d13-4c97-8a45-26f4f2fa3962-config\") on node \"crc\" DevicePath \"\"" Dec 03 09:11:35 crc kubenswrapper[4576]: I1203 09:11:35.583518 4576 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5e307ec2-8d13-4c97-8a45-26f4f2fa3962-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 09:11:35 crc kubenswrapper[4576]: I1203 09:11:35.583543 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kcnrm\" (UniqueName: \"kubernetes.io/projected/5e307ec2-8d13-4c97-8a45-26f4f2fa3962-kube-api-access-kcnrm\") on node \"crc\" DevicePath \"\"" Dec 03 09:11:35 crc kubenswrapper[4576]: I1203 09:11:35.583558 4576 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5e307ec2-8d13-4c97-8a45-26f4f2fa3962-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 09:11:35 crc kubenswrapper[4576]: I1203 09:11:35.585611 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e307ec2-8d13-4c97-8a45-26f4f2fa3962-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5e307ec2-8d13-4c97-8a45-26f4f2fa3962" (UID: "5e307ec2-8d13-4c97-8a45-26f4f2fa3962"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:11:35 crc kubenswrapper[4576]: I1203 09:11:35.609507 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e307ec2-8d13-4c97-8a45-26f4f2fa3962-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5e307ec2-8d13-4c97-8a45-26f4f2fa3962" (UID: "5e307ec2-8d13-4c97-8a45-26f4f2fa3962"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:11:35 crc kubenswrapper[4576]: I1203 09:11:35.691372 4576 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5e307ec2-8d13-4c97-8a45-26f4f2fa3962-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 09:11:35 crc kubenswrapper[4576]: I1203 09:11:35.691426 4576 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5e307ec2-8d13-4c97-8a45-26f4f2fa3962-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 09:11:35 crc kubenswrapper[4576]: I1203 09:11:35.781714 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b868669f-2w6g8" Dec 03 09:11:35 crc kubenswrapper[4576]: I1203 09:11:35.782035 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b868669f-2w6g8" event={"ID":"5e307ec2-8d13-4c97-8a45-26f4f2fa3962","Type":"ContainerDied","Data":"4c6fe04d600d30422d3d6beeed2b73657782ab073821308acc139ec84fffb195"} Dec 03 09:11:35 crc kubenswrapper[4576]: I1203 09:11:35.782173 4576 scope.go:117] "RemoveContainer" containerID="2f63975aa7a910cf19b4188f6331e3bdf9225c276c221d2181855f85c8b736b4" Dec 03 09:11:35 crc kubenswrapper[4576]: I1203 09:11:35.792922 4576 generic.go:334] "Generic (PLEG): container finished" podID="0af2ec10-0960-41c2-bbb9-aeffd38aa679" containerID="179ae80f0419e58b0552719630ed2011085849e21e27b11bc75902ab600a7dd6" exitCode=0 Dec 03 09:11:35 crc kubenswrapper[4576]: I1203 09:11:35.809810 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cf78879c9-kvbcw" event={"ID":"0af2ec10-0960-41c2-bbb9-aeffd38aa679","Type":"ContainerDied","Data":"179ae80f0419e58b0552719630ed2011085849e21e27b11bc75902ab600a7dd6"} Dec 03 09:11:35 crc kubenswrapper[4576]: I1203 09:11:35.979492 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b868669f-2w6g8"] Dec 03 09:11:35 crc kubenswrapper[4576]: I1203 09:11:35.988382 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5b868669f-2w6g8"] Dec 03 09:11:36 crc kubenswrapper[4576]: I1203 09:11:36.061604 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-574f45d7f7-l8jvd"] Dec 03 09:11:36 crc kubenswrapper[4576]: I1203 09:11:36.107736 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:11:36 crc kubenswrapper[4576]: I1203 09:11:36.153549 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-5c8794b7bf-jdk7k"] Dec 03 09:11:36 crc kubenswrapper[4576]: E1203 09:11:36.154096 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e307ec2-8d13-4c97-8a45-26f4f2fa3962" containerName="init" Dec 03 09:11:36 crc kubenswrapper[4576]: I1203 09:11:36.154112 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e307ec2-8d13-4c97-8a45-26f4f2fa3962" containerName="init" Dec 03 09:11:36 crc kubenswrapper[4576]: I1203 09:11:36.154349 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e307ec2-8d13-4c97-8a45-26f4f2fa3962" containerName="init" Dec 03 09:11:36 crc kubenswrapper[4576]: I1203 09:11:36.155484 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5c8794b7bf-jdk7k" Dec 03 09:11:36 crc kubenswrapper[4576]: I1203 09:11:36.204025 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5c8794b7bf-jdk7k"] Dec 03 09:11:36 crc kubenswrapper[4576]: I1203 09:11:36.326153 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/14e9436f-f375-45db-9619-2c3bd879fe07-scripts\") pod \"horizon-5c8794b7bf-jdk7k\" (UID: \"14e9436f-f375-45db-9619-2c3bd879fe07\") " pod="openstack/horizon-5c8794b7bf-jdk7k" Dec 03 09:11:36 crc kubenswrapper[4576]: I1203 09:11:36.326217 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/14e9436f-f375-45db-9619-2c3bd879fe07-config-data\") pod \"horizon-5c8794b7bf-jdk7k\" (UID: \"14e9436f-f375-45db-9619-2c3bd879fe07\") " pod="openstack/horizon-5c8794b7bf-jdk7k" Dec 03 09:11:36 crc kubenswrapper[4576]: I1203 09:11:36.326259 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14e9436f-f375-45db-9619-2c3bd879fe07-logs\") pod \"horizon-5c8794b7bf-jdk7k\" (UID: \"14e9436f-f375-45db-9619-2c3bd879fe07\") " pod="openstack/horizon-5c8794b7bf-jdk7k" Dec 03 09:11:36 crc kubenswrapper[4576]: I1203 09:11:36.326373 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2bvk8\" (UniqueName: \"kubernetes.io/projected/14e9436f-f375-45db-9619-2c3bd879fe07-kube-api-access-2bvk8\") pod \"horizon-5c8794b7bf-jdk7k\" (UID: \"14e9436f-f375-45db-9619-2c3bd879fe07\") " pod="openstack/horizon-5c8794b7bf-jdk7k" Dec 03 09:11:36 crc kubenswrapper[4576]: I1203 09:11:36.326402 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/14e9436f-f375-45db-9619-2c3bd879fe07-horizon-secret-key\") pod \"horizon-5c8794b7bf-jdk7k\" (UID: \"14e9436f-f375-45db-9619-2c3bd879fe07\") " pod="openstack/horizon-5c8794b7bf-jdk7k" Dec 03 09:11:36 crc kubenswrapper[4576]: I1203 09:11:36.428113 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/14e9436f-f375-45db-9619-2c3bd879fe07-config-data\") pod \"horizon-5c8794b7bf-jdk7k\" (UID: \"14e9436f-f375-45db-9619-2c3bd879fe07\") " pod="openstack/horizon-5c8794b7bf-jdk7k" Dec 03 09:11:36 crc kubenswrapper[4576]: I1203 09:11:36.428155 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14e9436f-f375-45db-9619-2c3bd879fe07-logs\") pod \"horizon-5c8794b7bf-jdk7k\" (UID: \"14e9436f-f375-45db-9619-2c3bd879fe07\") " pod="openstack/horizon-5c8794b7bf-jdk7k" Dec 03 09:11:36 crc kubenswrapper[4576]: I1203 09:11:36.428237 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2bvk8\" (UniqueName: \"kubernetes.io/projected/14e9436f-f375-45db-9619-2c3bd879fe07-kube-api-access-2bvk8\") pod \"horizon-5c8794b7bf-jdk7k\" (UID: \"14e9436f-f375-45db-9619-2c3bd879fe07\") " pod="openstack/horizon-5c8794b7bf-jdk7k" Dec 03 09:11:36 crc kubenswrapper[4576]: I1203 09:11:36.428263 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: 
\"kubernetes.io/secret/14e9436f-f375-45db-9619-2c3bd879fe07-horizon-secret-key\") pod \"horizon-5c8794b7bf-jdk7k\" (UID: \"14e9436f-f375-45db-9619-2c3bd879fe07\") " pod="openstack/horizon-5c8794b7bf-jdk7k" Dec 03 09:11:36 crc kubenswrapper[4576]: I1203 09:11:36.428318 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/14e9436f-f375-45db-9619-2c3bd879fe07-scripts\") pod \"horizon-5c8794b7bf-jdk7k\" (UID: \"14e9436f-f375-45db-9619-2c3bd879fe07\") " pod="openstack/horizon-5c8794b7bf-jdk7k" Dec 03 09:11:36 crc kubenswrapper[4576]: I1203 09:11:36.429202 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/14e9436f-f375-45db-9619-2c3bd879fe07-scripts\") pod \"horizon-5c8794b7bf-jdk7k\" (UID: \"14e9436f-f375-45db-9619-2c3bd879fe07\") " pod="openstack/horizon-5c8794b7bf-jdk7k" Dec 03 09:11:36 crc kubenswrapper[4576]: I1203 09:11:36.431210 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/14e9436f-f375-45db-9619-2c3bd879fe07-config-data\") pod \"horizon-5c8794b7bf-jdk7k\" (UID: \"14e9436f-f375-45db-9619-2c3bd879fe07\") " pod="openstack/horizon-5c8794b7bf-jdk7k" Dec 03 09:11:36 crc kubenswrapper[4576]: I1203 09:11:36.432044 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14e9436f-f375-45db-9619-2c3bd879fe07-logs\") pod \"horizon-5c8794b7bf-jdk7k\" (UID: \"14e9436f-f375-45db-9619-2c3bd879fe07\") " pod="openstack/horizon-5c8794b7bf-jdk7k" Dec 03 09:11:36 crc kubenswrapper[4576]: I1203 09:11:36.439134 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/14e9436f-f375-45db-9619-2c3bd879fe07-horizon-secret-key\") pod \"horizon-5c8794b7bf-jdk7k\" (UID: \"14e9436f-f375-45db-9619-2c3bd879fe07\") " pod="openstack/horizon-5c8794b7bf-jdk7k" Dec 03 09:11:36 crc kubenswrapper[4576]: I1203 09:11:36.456754 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2bvk8\" (UniqueName: \"kubernetes.io/projected/14e9436f-f375-45db-9619-2c3bd879fe07-kube-api-access-2bvk8\") pod \"horizon-5c8794b7bf-jdk7k\" (UID: \"14e9436f-f375-45db-9619-2c3bd879fe07\") " pod="openstack/horizon-5c8794b7bf-jdk7k" Dec 03 09:11:36 crc kubenswrapper[4576]: I1203 09:11:36.524058 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5c8794b7bf-jdk7k" Dec 03 09:11:37 crc kubenswrapper[4576]: I1203 09:11:37.573248 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5c8794b7bf-jdk7k"] Dec 03 09:11:37 crc kubenswrapper[4576]: W1203 09:11:37.680295 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod14e9436f_f375_45db_9619_2c3bd879fe07.slice/crio-53ac56f2e6a7b4b1183d3711a7931205f0f6ada5c01447edde34e231baf2fa73 WatchSource:0}: Error finding container 53ac56f2e6a7b4b1183d3711a7931205f0f6ada5c01447edde34e231baf2fa73: Status 404 returned error can't find the container with id 53ac56f2e6a7b4b1183d3711a7931205f0f6ada5c01447edde34e231baf2fa73 Dec 03 09:11:37 crc kubenswrapper[4576]: I1203 09:11:37.700486 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e307ec2-8d13-4c97-8a45-26f4f2fa3962" path="/var/lib/kubelet/pods/5e307ec2-8d13-4c97-8a45-26f4f2fa3962/volumes" Dec 03 09:11:37 crc kubenswrapper[4576]: I1203 09:11:37.829088 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5c8794b7bf-jdk7k" event={"ID":"14e9436f-f375-45db-9619-2c3bd879fe07","Type":"ContainerStarted","Data":"53ac56f2e6a7b4b1183d3711a7931205f0f6ada5c01447edde34e231baf2fa73"} Dec 03 09:11:37 crc kubenswrapper[4576]: I1203 09:11:37.832566 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cf78879c9-kvbcw" event={"ID":"0af2ec10-0960-41c2-bbb9-aeffd38aa679","Type":"ContainerStarted","Data":"ec00d3b0d8f59cfb96ee204c863c49f8643d79ffdf4edeb79161f50f54360d0e"} Dec 03 09:11:37 crc kubenswrapper[4576]: I1203 09:11:37.832857 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-cf78879c9-kvbcw" Dec 03 09:11:38 crc kubenswrapper[4576]: I1203 09:11:38.028228 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-cf78879c9-kvbcw" podStartSLOduration=6.028206965 podStartE2EDuration="6.028206965s" podCreationTimestamp="2025-12-03 09:11:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:11:37.85317494 +0000 UTC m=+1905.239151934" watchObservedRunningTime="2025-12-03 09:11:38.028206965 +0000 UTC m=+1905.414183949" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.345689 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-79d544fc65-l8nxr"] Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.400021 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-5d9b9454d4-cbqlk"] Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.401644 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5d9b9454d4-cbqlk" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.404063 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.427380 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5d9b9454d4-cbqlk"] Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.516263 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5c8794b7bf-jdk7k"] Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.581747 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/83ab6db2-7b9e-4161-a064-56fe67986825-logs\") pod \"horizon-5d9b9454d4-cbqlk\" (UID: \"83ab6db2-7b9e-4161-a064-56fe67986825\") " pod="openstack/horizon-5d9b9454d4-cbqlk" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.581824 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/83ab6db2-7b9e-4161-a064-56fe67986825-config-data\") pod \"horizon-5d9b9454d4-cbqlk\" (UID: \"83ab6db2-7b9e-4161-a064-56fe67986825\") " pod="openstack/horizon-5d9b9454d4-cbqlk" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.581855 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/83ab6db2-7b9e-4161-a064-56fe67986825-horizon-tls-certs\") pod \"horizon-5d9b9454d4-cbqlk\" (UID: \"83ab6db2-7b9e-4161-a064-56fe67986825\") " pod="openstack/horizon-5d9b9454d4-cbqlk" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.581919 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83ab6db2-7b9e-4161-a064-56fe67986825-combined-ca-bundle\") pod \"horizon-5d9b9454d4-cbqlk\" (UID: \"83ab6db2-7b9e-4161-a064-56fe67986825\") " pod="openstack/horizon-5d9b9454d4-cbqlk" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.581969 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8rgb\" (UniqueName: \"kubernetes.io/projected/83ab6db2-7b9e-4161-a064-56fe67986825-kube-api-access-n8rgb\") pod \"horizon-5d9b9454d4-cbqlk\" (UID: \"83ab6db2-7b9e-4161-a064-56fe67986825\") " pod="openstack/horizon-5d9b9454d4-cbqlk" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.581998 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/83ab6db2-7b9e-4161-a064-56fe67986825-horizon-secret-key\") pod \"horizon-5d9b9454d4-cbqlk\" (UID: \"83ab6db2-7b9e-4161-a064-56fe67986825\") " pod="openstack/horizon-5d9b9454d4-cbqlk" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.582019 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/83ab6db2-7b9e-4161-a064-56fe67986825-scripts\") pod \"horizon-5d9b9454d4-cbqlk\" (UID: \"83ab6db2-7b9e-4161-a064-56fe67986825\") " pod="openstack/horizon-5d9b9454d4-cbqlk" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.587517 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-6d649695d8-6rtxn"] Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.589647 
4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6d649695d8-6rtxn" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.622476 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6d649695d8-6rtxn"] Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.682916 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83ab6db2-7b9e-4161-a064-56fe67986825-combined-ca-bundle\") pod \"horizon-5d9b9454d4-cbqlk\" (UID: \"83ab6db2-7b9e-4161-a064-56fe67986825\") " pod="openstack/horizon-5d9b9454d4-cbqlk" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.682989 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8rgb\" (UniqueName: \"kubernetes.io/projected/83ab6db2-7b9e-4161-a064-56fe67986825-kube-api-access-n8rgb\") pod \"horizon-5d9b9454d4-cbqlk\" (UID: \"83ab6db2-7b9e-4161-a064-56fe67986825\") " pod="openstack/horizon-5d9b9454d4-cbqlk" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.683030 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/83ab6db2-7b9e-4161-a064-56fe67986825-horizon-secret-key\") pod \"horizon-5d9b9454d4-cbqlk\" (UID: \"83ab6db2-7b9e-4161-a064-56fe67986825\") " pod="openstack/horizon-5d9b9454d4-cbqlk" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.683055 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/83ab6db2-7b9e-4161-a064-56fe67986825-scripts\") pod \"horizon-5d9b9454d4-cbqlk\" (UID: \"83ab6db2-7b9e-4161-a064-56fe67986825\") " pod="openstack/horizon-5d9b9454d4-cbqlk" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.683110 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/83ab6db2-7b9e-4161-a064-56fe67986825-logs\") pod \"horizon-5d9b9454d4-cbqlk\" (UID: \"83ab6db2-7b9e-4161-a064-56fe67986825\") " pod="openstack/horizon-5d9b9454d4-cbqlk" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.683131 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/83ab6db2-7b9e-4161-a064-56fe67986825-config-data\") pod \"horizon-5d9b9454d4-cbqlk\" (UID: \"83ab6db2-7b9e-4161-a064-56fe67986825\") " pod="openstack/horizon-5d9b9454d4-cbqlk" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.683157 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/83ab6db2-7b9e-4161-a064-56fe67986825-horizon-tls-certs\") pod \"horizon-5d9b9454d4-cbqlk\" (UID: \"83ab6db2-7b9e-4161-a064-56fe67986825\") " pod="openstack/horizon-5d9b9454d4-cbqlk" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.684903 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/83ab6db2-7b9e-4161-a064-56fe67986825-scripts\") pod \"horizon-5d9b9454d4-cbqlk\" (UID: \"83ab6db2-7b9e-4161-a064-56fe67986825\") " pod="openstack/horizon-5d9b9454d4-cbqlk" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.685814 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/83ab6db2-7b9e-4161-a064-56fe67986825-config-data\") pod 
\"horizon-5d9b9454d4-cbqlk\" (UID: \"83ab6db2-7b9e-4161-a064-56fe67986825\") " pod="openstack/horizon-5d9b9454d4-cbqlk" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.686746 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/83ab6db2-7b9e-4161-a064-56fe67986825-logs\") pod \"horizon-5d9b9454d4-cbqlk\" (UID: \"83ab6db2-7b9e-4161-a064-56fe67986825\") " pod="openstack/horizon-5d9b9454d4-cbqlk" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.699233 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/83ab6db2-7b9e-4161-a064-56fe67986825-horizon-tls-certs\") pod \"horizon-5d9b9454d4-cbqlk\" (UID: \"83ab6db2-7b9e-4161-a064-56fe67986825\") " pod="openstack/horizon-5d9b9454d4-cbqlk" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.710838 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8rgb\" (UniqueName: \"kubernetes.io/projected/83ab6db2-7b9e-4161-a064-56fe67986825-kube-api-access-n8rgb\") pod \"horizon-5d9b9454d4-cbqlk\" (UID: \"83ab6db2-7b9e-4161-a064-56fe67986825\") " pod="openstack/horizon-5d9b9454d4-cbqlk" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.712037 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83ab6db2-7b9e-4161-a064-56fe67986825-combined-ca-bundle\") pod \"horizon-5d9b9454d4-cbqlk\" (UID: \"83ab6db2-7b9e-4161-a064-56fe67986825\") " pod="openstack/horizon-5d9b9454d4-cbqlk" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.714310 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/83ab6db2-7b9e-4161-a064-56fe67986825-horizon-secret-key\") pod \"horizon-5d9b9454d4-cbqlk\" (UID: \"83ab6db2-7b9e-4161-a064-56fe67986825\") " pod="openstack/horizon-5d9b9454d4-cbqlk" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.743918 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5d9b9454d4-cbqlk" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.784308 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/288ed488-5270-4966-b866-f9f015262989-combined-ca-bundle\") pod \"horizon-6d649695d8-6rtxn\" (UID: \"288ed488-5270-4966-b866-f9f015262989\") " pod="openstack/horizon-6d649695d8-6rtxn" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.784365 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/288ed488-5270-4966-b866-f9f015262989-logs\") pod \"horizon-6d649695d8-6rtxn\" (UID: \"288ed488-5270-4966-b866-f9f015262989\") " pod="openstack/horizon-6d649695d8-6rtxn" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.784403 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/288ed488-5270-4966-b866-f9f015262989-scripts\") pod \"horizon-6d649695d8-6rtxn\" (UID: \"288ed488-5270-4966-b866-f9f015262989\") " pod="openstack/horizon-6d649695d8-6rtxn" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.784477 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/288ed488-5270-4966-b866-f9f015262989-horizon-tls-certs\") pod \"horizon-6d649695d8-6rtxn\" (UID: \"288ed488-5270-4966-b866-f9f015262989\") " pod="openstack/horizon-6d649695d8-6rtxn" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.784502 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/288ed488-5270-4966-b866-f9f015262989-config-data\") pod \"horizon-6d649695d8-6rtxn\" (UID: \"288ed488-5270-4966-b866-f9f015262989\") " pod="openstack/horizon-6d649695d8-6rtxn" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.784515 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/288ed488-5270-4966-b866-f9f015262989-horizon-secret-key\") pod \"horizon-6d649695d8-6rtxn\" (UID: \"288ed488-5270-4966-b866-f9f015262989\") " pod="openstack/horizon-6d649695d8-6rtxn" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.784554 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mbnxg\" (UniqueName: \"kubernetes.io/projected/288ed488-5270-4966-b866-f9f015262989-kube-api-access-mbnxg\") pod \"horizon-6d649695d8-6rtxn\" (UID: \"288ed488-5270-4966-b866-f9f015262989\") " pod="openstack/horizon-6d649695d8-6rtxn" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.886276 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/288ed488-5270-4966-b866-f9f015262989-combined-ca-bundle\") pod \"horizon-6d649695d8-6rtxn\" (UID: \"288ed488-5270-4966-b866-f9f015262989\") " pod="openstack/horizon-6d649695d8-6rtxn" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.886344 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/288ed488-5270-4966-b866-f9f015262989-logs\") pod \"horizon-6d649695d8-6rtxn\" (UID: 
\"288ed488-5270-4966-b866-f9f015262989\") " pod="openstack/horizon-6d649695d8-6rtxn" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.886406 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/288ed488-5270-4966-b866-f9f015262989-scripts\") pod \"horizon-6d649695d8-6rtxn\" (UID: \"288ed488-5270-4966-b866-f9f015262989\") " pod="openstack/horizon-6d649695d8-6rtxn" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.886492 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/288ed488-5270-4966-b866-f9f015262989-horizon-tls-certs\") pod \"horizon-6d649695d8-6rtxn\" (UID: \"288ed488-5270-4966-b866-f9f015262989\") " pod="openstack/horizon-6d649695d8-6rtxn" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.886517 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/288ed488-5270-4966-b866-f9f015262989-config-data\") pod \"horizon-6d649695d8-6rtxn\" (UID: \"288ed488-5270-4966-b866-f9f015262989\") " pod="openstack/horizon-6d649695d8-6rtxn" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.886581 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/288ed488-5270-4966-b866-f9f015262989-horizon-secret-key\") pod \"horizon-6d649695d8-6rtxn\" (UID: \"288ed488-5270-4966-b866-f9f015262989\") " pod="openstack/horizon-6d649695d8-6rtxn" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.886613 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mbnxg\" (UniqueName: \"kubernetes.io/projected/288ed488-5270-4966-b866-f9f015262989-kube-api-access-mbnxg\") pod \"horizon-6d649695d8-6rtxn\" (UID: \"288ed488-5270-4966-b866-f9f015262989\") " pod="openstack/horizon-6d649695d8-6rtxn" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.888060 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/288ed488-5270-4966-b866-f9f015262989-scripts\") pod \"horizon-6d649695d8-6rtxn\" (UID: \"288ed488-5270-4966-b866-f9f015262989\") " pod="openstack/horizon-6d649695d8-6rtxn" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.889381 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/288ed488-5270-4966-b866-f9f015262989-logs\") pod \"horizon-6d649695d8-6rtxn\" (UID: \"288ed488-5270-4966-b866-f9f015262989\") " pod="openstack/horizon-6d649695d8-6rtxn" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.889640 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/288ed488-5270-4966-b866-f9f015262989-config-data\") pod \"horizon-6d649695d8-6rtxn\" (UID: \"288ed488-5270-4966-b866-f9f015262989\") " pod="openstack/horizon-6d649695d8-6rtxn" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.892423 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/288ed488-5270-4966-b866-f9f015262989-horizon-secret-key\") pod \"horizon-6d649695d8-6rtxn\" (UID: \"288ed488-5270-4966-b866-f9f015262989\") " pod="openstack/horizon-6d649695d8-6rtxn" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.893447 4576 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/288ed488-5270-4966-b866-f9f015262989-combined-ca-bundle\") pod \"horizon-6d649695d8-6rtxn\" (UID: \"288ed488-5270-4966-b866-f9f015262989\") " pod="openstack/horizon-6d649695d8-6rtxn" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.896096 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/288ed488-5270-4966-b866-f9f015262989-horizon-tls-certs\") pod \"horizon-6d649695d8-6rtxn\" (UID: \"288ed488-5270-4966-b866-f9f015262989\") " pod="openstack/horizon-6d649695d8-6rtxn" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.907337 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mbnxg\" (UniqueName: \"kubernetes.io/projected/288ed488-5270-4966-b866-f9f015262989-kube-api-access-mbnxg\") pod \"horizon-6d649695d8-6rtxn\" (UID: \"288ed488-5270-4966-b866-f9f015262989\") " pod="openstack/horizon-6d649695d8-6rtxn" Dec 03 09:11:41 crc kubenswrapper[4576]: I1203 09:11:41.907811 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6d649695d8-6rtxn" Dec 03 09:11:42 crc kubenswrapper[4576]: I1203 09:11:42.728764 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-cf78879c9-kvbcw" Dec 03 09:11:42 crc kubenswrapper[4576]: I1203 09:11:42.813160 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c79d794d7-7rqjc"] Dec 03 09:11:42 crc kubenswrapper[4576]: I1203 09:11:42.817851 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c79d794d7-7rqjc" podUID="0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a" containerName="dnsmasq-dns" containerID="cri-o://39d00c03f46805cb84da9b3ae3c2f088c9132fc8021fab9985261cb56df0dab8" gracePeriod=10 Dec 03 09:11:42 crc kubenswrapper[4576]: I1203 09:11:42.975566 4576 generic.go:334] "Generic (PLEG): container finished" podID="361add1e-7a29-4f4a-9784-1ddfb1cb0de5" containerID="563ee6a4f03a3609f385466bb3a6548e1faf0a4259df8814f616a82c6971b46a" exitCode=0 Dec 03 09:11:42 crc kubenswrapper[4576]: I1203 09:11:42.975818 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-gffzv" event={"ID":"361add1e-7a29-4f4a-9784-1ddfb1cb0de5","Type":"ContainerDied","Data":"563ee6a4f03a3609f385466bb3a6548e1faf0a4259df8814f616a82c6971b46a"} Dec 03 09:11:43 crc kubenswrapper[4576]: E1203 09:11:43.145483 4576 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0d9c9fe7_e79c_4dff_a924_8dd9f4b7939a.slice/crio-39d00c03f46805cb84da9b3ae3c2f088c9132fc8021fab9985261cb56df0dab8.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0d9c9fe7_e79c_4dff_a924_8dd9f4b7939a.slice/crio-conmon-39d00c03f46805cb84da9b3ae3c2f088c9132fc8021fab9985261cb56df0dab8.scope\": RecentStats: unable to find data in memory cache]" Dec 03 09:11:44 crc kubenswrapper[4576]: I1203 09:11:44.017833 4576 generic.go:334] "Generic (PLEG): container finished" podID="9fd5058c-a47f-46cd-b7a7-d6d02014da6e" containerID="d884e6620ba50c7b8e22a8c30fd33c77f64687e765c97885ee49fb58d9ec5a2f" exitCode=0 Dec 03 09:11:44 crc kubenswrapper[4576]: I1203 09:11:44.017920 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/glance-db-sync-9dp49" event={"ID":"9fd5058c-a47f-46cd-b7a7-d6d02014da6e","Type":"ContainerDied","Data":"d884e6620ba50c7b8e22a8c30fd33c77f64687e765c97885ee49fb58d9ec5a2f"} Dec 03 09:11:44 crc kubenswrapper[4576]: I1203 09:11:44.030737 4576 generic.go:334] "Generic (PLEG): container finished" podID="0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a" containerID="39d00c03f46805cb84da9b3ae3c2f088c9132fc8021fab9985261cb56df0dab8" exitCode=0 Dec 03 09:11:44 crc kubenswrapper[4576]: I1203 09:11:44.031006 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c79d794d7-7rqjc" event={"ID":"0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a","Type":"ContainerDied","Data":"39d00c03f46805cb84da9b3ae3c2f088c9132fc8021fab9985261cb56df0dab8"} Dec 03 09:11:45 crc kubenswrapper[4576]: I1203 09:11:45.879267 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5c79d794d7-7rqjc" podUID="0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.132:5353: connect: connection refused" Dec 03 09:11:50 crc kubenswrapper[4576]: I1203 09:11:50.877978 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5c79d794d7-7rqjc" podUID="0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.132:5353: connect: connection refused" Dec 03 09:11:53 crc kubenswrapper[4576]: E1203 09:11:53.758684 4576 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Dec 03 09:11:53 crc kubenswrapper[4576]: E1203 09:11:53.760247 4576 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n74h576hch687hd6hd5h68fh5cfh84h65ch8bhf6h659h64dh594h684h598h547h57h658h579hd7h677hc9h675hd9h8bh55bh56fh8dh9dh67bq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2bvk8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-5c8794b7bf-jdk7k_openstack(14e9436f-f375-45db-9619-2c3bd879fe07): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 09:11:53 crc kubenswrapper[4576]: E1203 09:11:53.762945 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-5c8794b7bf-jdk7k" podUID="14e9436f-f375-45db-9619-2c3bd879fe07" Dec 03 09:11:53 crc kubenswrapper[4576]: I1203 09:11:53.873683 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-gffzv" Dec 03 09:11:53 crc kubenswrapper[4576]: I1203 09:11:53.902271 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-9dp49" Dec 03 09:11:53 crc kubenswrapper[4576]: I1203 09:11:53.980963 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/361add1e-7a29-4f4a-9784-1ddfb1cb0de5-scripts\") pod \"361add1e-7a29-4f4a-9784-1ddfb1cb0de5\" (UID: \"361add1e-7a29-4f4a-9784-1ddfb1cb0de5\") " Dec 03 09:11:53 crc kubenswrapper[4576]: I1203 09:11:53.981065 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/361add1e-7a29-4f4a-9784-1ddfb1cb0de5-config-data\") pod \"361add1e-7a29-4f4a-9784-1ddfb1cb0de5\" (UID: \"361add1e-7a29-4f4a-9784-1ddfb1cb0de5\") " Dec 03 09:11:53 crc kubenswrapper[4576]: I1203 09:11:53.981151 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/361add1e-7a29-4f4a-9784-1ddfb1cb0de5-fernet-keys\") pod \"361add1e-7a29-4f4a-9784-1ddfb1cb0de5\" (UID: \"361add1e-7a29-4f4a-9784-1ddfb1cb0de5\") " Dec 03 09:11:53 crc kubenswrapper[4576]: I1203 09:11:53.981179 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/361add1e-7a29-4f4a-9784-1ddfb1cb0de5-credential-keys\") pod \"361add1e-7a29-4f4a-9784-1ddfb1cb0de5\" (UID: \"361add1e-7a29-4f4a-9784-1ddfb1cb0de5\") " Dec 03 09:11:53 crc kubenswrapper[4576]: I1203 09:11:53.981228 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lw5kl\" (UniqueName: \"kubernetes.io/projected/361add1e-7a29-4f4a-9784-1ddfb1cb0de5-kube-api-access-lw5kl\") pod \"361add1e-7a29-4f4a-9784-1ddfb1cb0de5\" (UID: \"361add1e-7a29-4f4a-9784-1ddfb1cb0de5\") " Dec 03 09:11:53 crc kubenswrapper[4576]: I1203 09:11:53.981385 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/361add1e-7a29-4f4a-9784-1ddfb1cb0de5-combined-ca-bundle\") pod \"361add1e-7a29-4f4a-9784-1ddfb1cb0de5\" (UID: \"361add1e-7a29-4f4a-9784-1ddfb1cb0de5\") " Dec 03 09:11:54 crc kubenswrapper[4576]: I1203 09:11:53.992657 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/361add1e-7a29-4f4a-9784-1ddfb1cb0de5-scripts" (OuterVolumeSpecName: "scripts") pod "361add1e-7a29-4f4a-9784-1ddfb1cb0de5" (UID: "361add1e-7a29-4f4a-9784-1ddfb1cb0de5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:11:54 crc kubenswrapper[4576]: I1203 09:11:54.003057 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/361add1e-7a29-4f4a-9784-1ddfb1cb0de5-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "361add1e-7a29-4f4a-9784-1ddfb1cb0de5" (UID: "361add1e-7a29-4f4a-9784-1ddfb1cb0de5"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:11:54 crc kubenswrapper[4576]: I1203 09:11:54.009270 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/361add1e-7a29-4f4a-9784-1ddfb1cb0de5-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "361add1e-7a29-4f4a-9784-1ddfb1cb0de5" (UID: "361add1e-7a29-4f4a-9784-1ddfb1cb0de5"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:11:54 crc kubenswrapper[4576]: I1203 09:11:54.016496 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/361add1e-7a29-4f4a-9784-1ddfb1cb0de5-kube-api-access-lw5kl" (OuterVolumeSpecName: "kube-api-access-lw5kl") pod "361add1e-7a29-4f4a-9784-1ddfb1cb0de5" (UID: "361add1e-7a29-4f4a-9784-1ddfb1cb0de5"). InnerVolumeSpecName "kube-api-access-lw5kl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:11:54 crc kubenswrapper[4576]: I1203 09:11:54.042212 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/361add1e-7a29-4f4a-9784-1ddfb1cb0de5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "361add1e-7a29-4f4a-9784-1ddfb1cb0de5" (UID: "361add1e-7a29-4f4a-9784-1ddfb1cb0de5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:11:54 crc kubenswrapper[4576]: I1203 09:11:54.047610 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/361add1e-7a29-4f4a-9784-1ddfb1cb0de5-config-data" (OuterVolumeSpecName: "config-data") pod "361add1e-7a29-4f4a-9784-1ddfb1cb0de5" (UID: "361add1e-7a29-4f4a-9784-1ddfb1cb0de5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:11:54 crc kubenswrapper[4576]: I1203 09:11:54.082947 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zwdf4\" (UniqueName: \"kubernetes.io/projected/9fd5058c-a47f-46cd-b7a7-d6d02014da6e-kube-api-access-zwdf4\") pod \"9fd5058c-a47f-46cd-b7a7-d6d02014da6e\" (UID: \"9fd5058c-a47f-46cd-b7a7-d6d02014da6e\") " Dec 03 09:11:54 crc kubenswrapper[4576]: I1203 09:11:54.083094 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9fd5058c-a47f-46cd-b7a7-d6d02014da6e-config-data\") pod \"9fd5058c-a47f-46cd-b7a7-d6d02014da6e\" (UID: \"9fd5058c-a47f-46cd-b7a7-d6d02014da6e\") " Dec 03 09:11:54 crc kubenswrapper[4576]: I1203 09:11:54.083211 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/9fd5058c-a47f-46cd-b7a7-d6d02014da6e-db-sync-config-data\") pod \"9fd5058c-a47f-46cd-b7a7-d6d02014da6e\" (UID: \"9fd5058c-a47f-46cd-b7a7-d6d02014da6e\") " Dec 03 09:11:54 crc kubenswrapper[4576]: I1203 09:11:54.083276 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9fd5058c-a47f-46cd-b7a7-d6d02014da6e-combined-ca-bundle\") pod \"9fd5058c-a47f-46cd-b7a7-d6d02014da6e\" (UID: \"9fd5058c-a47f-46cd-b7a7-d6d02014da6e\") " Dec 03 09:11:54 crc kubenswrapper[4576]: I1203 09:11:54.084012 4576 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/361add1e-7a29-4f4a-9784-1ddfb1cb0de5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:11:54 crc kubenswrapper[4576]: I1203 09:11:54.084041 4576 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/361add1e-7a29-4f4a-9784-1ddfb1cb0de5-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:11:54 crc kubenswrapper[4576]: I1203 09:11:54.084056 4576 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/361add1e-7a29-4f4a-9784-1ddfb1cb0de5-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 09:11:54 crc kubenswrapper[4576]: I1203 09:11:54.084068 4576 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/361add1e-7a29-4f4a-9784-1ddfb1cb0de5-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 03 09:11:54 crc kubenswrapper[4576]: I1203 09:11:54.084078 4576 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/361add1e-7a29-4f4a-9784-1ddfb1cb0de5-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 03 09:11:54 crc kubenswrapper[4576]: I1203 09:11:54.084089 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lw5kl\" (UniqueName: \"kubernetes.io/projected/361add1e-7a29-4f4a-9784-1ddfb1cb0de5-kube-api-access-lw5kl\") on node \"crc\" DevicePath \"\"" Dec 03 09:11:54 crc kubenswrapper[4576]: I1203 09:11:54.091301 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9fd5058c-a47f-46cd-b7a7-d6d02014da6e-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "9fd5058c-a47f-46cd-b7a7-d6d02014da6e" (UID: "9fd5058c-a47f-46cd-b7a7-d6d02014da6e"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:11:54 crc kubenswrapper[4576]: I1203 09:11:54.108220 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9fd5058c-a47f-46cd-b7a7-d6d02014da6e-kube-api-access-zwdf4" (OuterVolumeSpecName: "kube-api-access-zwdf4") pod "9fd5058c-a47f-46cd-b7a7-d6d02014da6e" (UID: "9fd5058c-a47f-46cd-b7a7-d6d02014da6e"). InnerVolumeSpecName "kube-api-access-zwdf4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:11:54 crc kubenswrapper[4576]: I1203 09:11:54.120667 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9fd5058c-a47f-46cd-b7a7-d6d02014da6e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9fd5058c-a47f-46cd-b7a7-d6d02014da6e" (UID: "9fd5058c-a47f-46cd-b7a7-d6d02014da6e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:11:54 crc kubenswrapper[4576]: I1203 09:11:54.156511 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9fd5058c-a47f-46cd-b7a7-d6d02014da6e-config-data" (OuterVolumeSpecName: "config-data") pod "9fd5058c-a47f-46cd-b7a7-d6d02014da6e" (UID: "9fd5058c-a47f-46cd-b7a7-d6d02014da6e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:11:54 crc kubenswrapper[4576]: I1203 09:11:54.186052 4576 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/9fd5058c-a47f-46cd-b7a7-d6d02014da6e-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 09:11:54 crc kubenswrapper[4576]: I1203 09:11:54.186088 4576 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9fd5058c-a47f-46cd-b7a7-d6d02014da6e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:11:54 crc kubenswrapper[4576]: I1203 09:11:54.186100 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zwdf4\" (UniqueName: \"kubernetes.io/projected/9fd5058c-a47f-46cd-b7a7-d6d02014da6e-kube-api-access-zwdf4\") on node \"crc\" DevicePath \"\"" Dec 03 09:11:54 crc kubenswrapper[4576]: I1203 09:11:54.186110 4576 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9fd5058c-a47f-46cd-b7a7-d6d02014da6e-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 09:11:54 crc kubenswrapper[4576]: I1203 09:11:54.206489 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-9dp49" Dec 03 09:11:54 crc kubenswrapper[4576]: I1203 09:11:54.206672 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-9dp49" event={"ID":"9fd5058c-a47f-46cd-b7a7-d6d02014da6e","Type":"ContainerDied","Data":"0bbfcd8f11eeb0de64fe101409713b8b3fcc168c812a86e790f83cbe4c2b6f73"} Dec 03 09:11:54 crc kubenswrapper[4576]: I1203 09:11:54.206761 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0bbfcd8f11eeb0de64fe101409713b8b3fcc168c812a86e790f83cbe4c2b6f73" Dec 03 09:11:54 crc kubenswrapper[4576]: I1203 09:11:54.209605 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-gffzv" Dec 03 09:11:54 crc kubenswrapper[4576]: I1203 09:11:54.209683 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-gffzv" event={"ID":"361add1e-7a29-4f4a-9784-1ddfb1cb0de5","Type":"ContainerDied","Data":"15072a9d4227c664a62c47c79e2a64bc58da1924fe88a6731d2ec90e853e87a6"} Dec 03 09:11:54 crc kubenswrapper[4576]: I1203 09:11:54.209722 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="15072a9d4227c664a62c47c79e2a64bc58da1924fe88a6731d2ec90e853e87a6" Dec 03 09:11:54 crc kubenswrapper[4576]: I1203 09:11:54.967709 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-gffzv"] Dec 03 09:11:54 crc kubenswrapper[4576]: I1203 09:11:54.971945 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-gffzv"] Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.077874 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-w66xd"] Dec 03 09:11:55 crc kubenswrapper[4576]: E1203 09:11:55.078321 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="361add1e-7a29-4f4a-9784-1ddfb1cb0de5" containerName="keystone-bootstrap" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.078338 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="361add1e-7a29-4f4a-9784-1ddfb1cb0de5" containerName="keystone-bootstrap" Dec 03 09:11:55 crc kubenswrapper[4576]: E1203 09:11:55.078353 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fd5058c-a47f-46cd-b7a7-d6d02014da6e" containerName="glance-db-sync" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.078361 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fd5058c-a47f-46cd-b7a7-d6d02014da6e" containerName="glance-db-sync" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.078620 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="361add1e-7a29-4f4a-9784-1ddfb1cb0de5" containerName="keystone-bootstrap" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.078647 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="9fd5058c-a47f-46cd-b7a7-d6d02014da6e" containerName="glance-db-sync" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.079371 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-w66xd" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.086912 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.086944 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.087130 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.087302 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.086433 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-8h9rk" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.110740 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-w66xd"] Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.209614 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8s5j\" (UniqueName: \"kubernetes.io/projected/bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07-kube-api-access-m8s5j\") pod \"keystone-bootstrap-w66xd\" (UID: \"bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07\") " pod="openstack/keystone-bootstrap-w66xd" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.209944 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07-combined-ca-bundle\") pod \"keystone-bootstrap-w66xd\" (UID: \"bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07\") " pod="openstack/keystone-bootstrap-w66xd" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.209989 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07-credential-keys\") pod \"keystone-bootstrap-w66xd\" (UID: \"bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07\") " pod="openstack/keystone-bootstrap-w66xd" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.210010 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07-fernet-keys\") pod \"keystone-bootstrap-w66xd\" (UID: \"bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07\") " pod="openstack/keystone-bootstrap-w66xd" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.210037 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07-scripts\") pod \"keystone-bootstrap-w66xd\" (UID: \"bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07\") " pod="openstack/keystone-bootstrap-w66xd" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.210052 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07-config-data\") pod \"keystone-bootstrap-w66xd\" (UID: \"bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07\") " pod="openstack/keystone-bootstrap-w66xd" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.312276 4576 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"kube-api-access-m8s5j\" (UniqueName: \"kubernetes.io/projected/bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07-kube-api-access-m8s5j\") pod \"keystone-bootstrap-w66xd\" (UID: \"bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07\") " pod="openstack/keystone-bootstrap-w66xd" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.312358 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07-combined-ca-bundle\") pod \"keystone-bootstrap-w66xd\" (UID: \"bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07\") " pod="openstack/keystone-bootstrap-w66xd" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.312421 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07-credential-keys\") pod \"keystone-bootstrap-w66xd\" (UID: \"bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07\") " pod="openstack/keystone-bootstrap-w66xd" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.312438 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07-fernet-keys\") pod \"keystone-bootstrap-w66xd\" (UID: \"bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07\") " pod="openstack/keystone-bootstrap-w66xd" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.312487 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07-scripts\") pod \"keystone-bootstrap-w66xd\" (UID: \"bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07\") " pod="openstack/keystone-bootstrap-w66xd" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.312501 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07-config-data\") pod \"keystone-bootstrap-w66xd\" (UID: \"bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07\") " pod="openstack/keystone-bootstrap-w66xd" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.319211 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07-fernet-keys\") pod \"keystone-bootstrap-w66xd\" (UID: \"bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07\") " pod="openstack/keystone-bootstrap-w66xd" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.319251 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07-config-data\") pod \"keystone-bootstrap-w66xd\" (UID: \"bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07\") " pod="openstack/keystone-bootstrap-w66xd" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.319913 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07-combined-ca-bundle\") pod \"keystone-bootstrap-w66xd\" (UID: \"bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07\") " pod="openstack/keystone-bootstrap-w66xd" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.323229 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07-credential-keys\") pod \"keystone-bootstrap-w66xd\" (UID: 
\"bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07\") " pod="openstack/keystone-bootstrap-w66xd" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.324650 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07-scripts\") pod \"keystone-bootstrap-w66xd\" (UID: \"bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07\") " pod="openstack/keystone-bootstrap-w66xd" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.443402 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8s5j\" (UniqueName: \"kubernetes.io/projected/bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07-kube-api-access-m8s5j\") pod \"keystone-bootstrap-w66xd\" (UID: \"bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07\") " pod="openstack/keystone-bootstrap-w66xd" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.559866 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-dl696"] Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.561464 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56df8fb6b7-dl696" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.624705 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-dl696"] Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.703337 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="361add1e-7a29-4f4a-9784-1ddfb1cb0de5" path="/var/lib/kubelet/pods/361add1e-7a29-4f4a-9784-1ddfb1cb0de5/volumes" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.704027 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-w66xd" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.718692 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3ca98b4d-d895-4456-81ae-ea8c0d5f59d2-ovsdbserver-nb\") pod \"dnsmasq-dns-56df8fb6b7-dl696\" (UID: \"3ca98b4d-d895-4456-81ae-ea8c0d5f59d2\") " pod="openstack/dnsmasq-dns-56df8fb6b7-dl696" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.718935 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3ca98b4d-d895-4456-81ae-ea8c0d5f59d2-dns-swift-storage-0\") pod \"dnsmasq-dns-56df8fb6b7-dl696\" (UID: \"3ca98b4d-d895-4456-81ae-ea8c0d5f59d2\") " pod="openstack/dnsmasq-dns-56df8fb6b7-dl696" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.719103 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3ca98b4d-d895-4456-81ae-ea8c0d5f59d2-dns-svc\") pod \"dnsmasq-dns-56df8fb6b7-dl696\" (UID: \"3ca98b4d-d895-4456-81ae-ea8c0d5f59d2\") " pod="openstack/dnsmasq-dns-56df8fb6b7-dl696" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.719210 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3ca98b4d-d895-4456-81ae-ea8c0d5f59d2-config\") pod \"dnsmasq-dns-56df8fb6b7-dl696\" (UID: \"3ca98b4d-d895-4456-81ae-ea8c0d5f59d2\") " pod="openstack/dnsmasq-dns-56df8fb6b7-dl696" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.719323 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3ca98b4d-d895-4456-81ae-ea8c0d5f59d2-ovsdbserver-sb\") pod \"dnsmasq-dns-56df8fb6b7-dl696\" (UID: \"3ca98b4d-d895-4456-81ae-ea8c0d5f59d2\") " pod="openstack/dnsmasq-dns-56df8fb6b7-dl696" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.719447 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sthtq\" (UniqueName: \"kubernetes.io/projected/3ca98b4d-d895-4456-81ae-ea8c0d5f59d2-kube-api-access-sthtq\") pod \"dnsmasq-dns-56df8fb6b7-dl696\" (UID: \"3ca98b4d-d895-4456-81ae-ea8c0d5f59d2\") " pod="openstack/dnsmasq-dns-56df8fb6b7-dl696" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.821616 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3ca98b4d-d895-4456-81ae-ea8c0d5f59d2-dns-svc\") pod \"dnsmasq-dns-56df8fb6b7-dl696\" (UID: \"3ca98b4d-d895-4456-81ae-ea8c0d5f59d2\") " pod="openstack/dnsmasq-dns-56df8fb6b7-dl696" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.821658 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3ca98b4d-d895-4456-81ae-ea8c0d5f59d2-config\") pod \"dnsmasq-dns-56df8fb6b7-dl696\" (UID: \"3ca98b4d-d895-4456-81ae-ea8c0d5f59d2\") " pod="openstack/dnsmasq-dns-56df8fb6b7-dl696" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.821700 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3ca98b4d-d895-4456-81ae-ea8c0d5f59d2-ovsdbserver-sb\") pod \"dnsmasq-dns-56df8fb6b7-dl696\" (UID: \"3ca98b4d-d895-4456-81ae-ea8c0d5f59d2\") " pod="openstack/dnsmasq-dns-56df8fb6b7-dl696" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.821744 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sthtq\" (UniqueName: \"kubernetes.io/projected/3ca98b4d-d895-4456-81ae-ea8c0d5f59d2-kube-api-access-sthtq\") pod \"dnsmasq-dns-56df8fb6b7-dl696\" (UID: \"3ca98b4d-d895-4456-81ae-ea8c0d5f59d2\") " pod="openstack/dnsmasq-dns-56df8fb6b7-dl696" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.821789 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3ca98b4d-d895-4456-81ae-ea8c0d5f59d2-ovsdbserver-nb\") pod \"dnsmasq-dns-56df8fb6b7-dl696\" (UID: \"3ca98b4d-d895-4456-81ae-ea8c0d5f59d2\") " pod="openstack/dnsmasq-dns-56df8fb6b7-dl696" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.821824 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3ca98b4d-d895-4456-81ae-ea8c0d5f59d2-dns-swift-storage-0\") pod \"dnsmasq-dns-56df8fb6b7-dl696\" (UID: \"3ca98b4d-d895-4456-81ae-ea8c0d5f59d2\") " pod="openstack/dnsmasq-dns-56df8fb6b7-dl696" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.822728 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3ca98b4d-d895-4456-81ae-ea8c0d5f59d2-dns-swift-storage-0\") pod \"dnsmasq-dns-56df8fb6b7-dl696\" (UID: \"3ca98b4d-d895-4456-81ae-ea8c0d5f59d2\") " pod="openstack/dnsmasq-dns-56df8fb6b7-dl696" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.822864 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/3ca98b4d-d895-4456-81ae-ea8c0d5f59d2-ovsdbserver-sb\") pod \"dnsmasq-dns-56df8fb6b7-dl696\" (UID: \"3ca98b4d-d895-4456-81ae-ea8c0d5f59d2\") " pod="openstack/dnsmasq-dns-56df8fb6b7-dl696" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.832157 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3ca98b4d-d895-4456-81ae-ea8c0d5f59d2-ovsdbserver-nb\") pod \"dnsmasq-dns-56df8fb6b7-dl696\" (UID: \"3ca98b4d-d895-4456-81ae-ea8c0d5f59d2\") " pod="openstack/dnsmasq-dns-56df8fb6b7-dl696" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.832328 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3ca98b4d-d895-4456-81ae-ea8c0d5f59d2-dns-svc\") pod \"dnsmasq-dns-56df8fb6b7-dl696\" (UID: \"3ca98b4d-d895-4456-81ae-ea8c0d5f59d2\") " pod="openstack/dnsmasq-dns-56df8fb6b7-dl696" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.832402 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3ca98b4d-d895-4456-81ae-ea8c0d5f59d2-config\") pod \"dnsmasq-dns-56df8fb6b7-dl696\" (UID: \"3ca98b4d-d895-4456-81ae-ea8c0d5f59d2\") " pod="openstack/dnsmasq-dns-56df8fb6b7-dl696" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.853692 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sthtq\" (UniqueName: \"kubernetes.io/projected/3ca98b4d-d895-4456-81ae-ea8c0d5f59d2-kube-api-access-sthtq\") pod \"dnsmasq-dns-56df8fb6b7-dl696\" (UID: \"3ca98b4d-d895-4456-81ae-ea8c0d5f59d2\") " pod="openstack/dnsmasq-dns-56df8fb6b7-dl696" Dec 03 09:11:55 crc kubenswrapper[4576]: I1203 09:11:55.900056 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56df8fb6b7-dl696" Dec 03 09:11:56 crc kubenswrapper[4576]: I1203 09:11:56.324700 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 09:11:56 crc kubenswrapper[4576]: I1203 09:11:56.326565 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 09:11:56 crc kubenswrapper[4576]: I1203 09:11:56.334940 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 03 09:11:56 crc kubenswrapper[4576]: I1203 09:11:56.335127 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-n9x4j" Dec 03 09:11:56 crc kubenswrapper[4576]: I1203 09:11:56.335537 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 03 09:11:56 crc kubenswrapper[4576]: I1203 09:11:56.357319 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 09:11:56 crc kubenswrapper[4576]: I1203 09:11:56.436773 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"c99775a3-39b9-42bf-8193-1fc88285efd5\") " pod="openstack/glance-default-external-api-0" Dec 03 09:11:56 crc kubenswrapper[4576]: I1203 09:11:56.436882 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vqp8\" (UniqueName: \"kubernetes.io/projected/c99775a3-39b9-42bf-8193-1fc88285efd5-kube-api-access-4vqp8\") pod \"glance-default-external-api-0\" (UID: \"c99775a3-39b9-42bf-8193-1fc88285efd5\") " pod="openstack/glance-default-external-api-0" Dec 03 09:11:56 crc kubenswrapper[4576]: I1203 09:11:56.436912 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c99775a3-39b9-42bf-8193-1fc88285efd5-logs\") pod \"glance-default-external-api-0\" (UID: \"c99775a3-39b9-42bf-8193-1fc88285efd5\") " pod="openstack/glance-default-external-api-0" Dec 03 09:11:56 crc kubenswrapper[4576]: I1203 09:11:56.436926 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c99775a3-39b9-42bf-8193-1fc88285efd5-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c99775a3-39b9-42bf-8193-1fc88285efd5\") " pod="openstack/glance-default-external-api-0" Dec 03 09:11:56 crc kubenswrapper[4576]: I1203 09:11:56.436964 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c99775a3-39b9-42bf-8193-1fc88285efd5-scripts\") pod \"glance-default-external-api-0\" (UID: \"c99775a3-39b9-42bf-8193-1fc88285efd5\") " pod="openstack/glance-default-external-api-0" Dec 03 09:11:56 crc kubenswrapper[4576]: I1203 09:11:56.437035 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c99775a3-39b9-42bf-8193-1fc88285efd5-config-data\") pod \"glance-default-external-api-0\" (UID: \"c99775a3-39b9-42bf-8193-1fc88285efd5\") " pod="openstack/glance-default-external-api-0" Dec 03 09:11:56 crc kubenswrapper[4576]: I1203 09:11:56.437099 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c99775a3-39b9-42bf-8193-1fc88285efd5-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c99775a3-39b9-42bf-8193-1fc88285efd5\") " 
pod="openstack/glance-default-external-api-0" Dec 03 09:11:56 crc kubenswrapper[4576]: I1203 09:11:56.538566 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c99775a3-39b9-42bf-8193-1fc88285efd5-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c99775a3-39b9-42bf-8193-1fc88285efd5\") " pod="openstack/glance-default-external-api-0" Dec 03 09:11:56 crc kubenswrapper[4576]: I1203 09:11:56.538650 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"c99775a3-39b9-42bf-8193-1fc88285efd5\") " pod="openstack/glance-default-external-api-0" Dec 03 09:11:56 crc kubenswrapper[4576]: I1203 09:11:56.538681 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vqp8\" (UniqueName: \"kubernetes.io/projected/c99775a3-39b9-42bf-8193-1fc88285efd5-kube-api-access-4vqp8\") pod \"glance-default-external-api-0\" (UID: \"c99775a3-39b9-42bf-8193-1fc88285efd5\") " pod="openstack/glance-default-external-api-0" Dec 03 09:11:56 crc kubenswrapper[4576]: I1203 09:11:56.538703 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c99775a3-39b9-42bf-8193-1fc88285efd5-logs\") pod \"glance-default-external-api-0\" (UID: \"c99775a3-39b9-42bf-8193-1fc88285efd5\") " pod="openstack/glance-default-external-api-0" Dec 03 09:11:56 crc kubenswrapper[4576]: I1203 09:11:56.538717 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c99775a3-39b9-42bf-8193-1fc88285efd5-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c99775a3-39b9-42bf-8193-1fc88285efd5\") " pod="openstack/glance-default-external-api-0" Dec 03 09:11:56 crc kubenswrapper[4576]: I1203 09:11:56.538743 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c99775a3-39b9-42bf-8193-1fc88285efd5-scripts\") pod \"glance-default-external-api-0\" (UID: \"c99775a3-39b9-42bf-8193-1fc88285efd5\") " pod="openstack/glance-default-external-api-0" Dec 03 09:11:56 crc kubenswrapper[4576]: I1203 09:11:56.538777 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c99775a3-39b9-42bf-8193-1fc88285efd5-config-data\") pod \"glance-default-external-api-0\" (UID: \"c99775a3-39b9-42bf-8193-1fc88285efd5\") " pod="openstack/glance-default-external-api-0" Dec 03 09:11:56 crc kubenswrapper[4576]: I1203 09:11:56.539219 4576 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"c99775a3-39b9-42bf-8193-1fc88285efd5\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-external-api-0" Dec 03 09:11:56 crc kubenswrapper[4576]: I1203 09:11:56.539293 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c99775a3-39b9-42bf-8193-1fc88285efd5-logs\") pod \"glance-default-external-api-0\" (UID: \"c99775a3-39b9-42bf-8193-1fc88285efd5\") " pod="openstack/glance-default-external-api-0" Dec 03 09:11:56 crc kubenswrapper[4576]: I1203 09:11:56.539326 
4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c99775a3-39b9-42bf-8193-1fc88285efd5-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c99775a3-39b9-42bf-8193-1fc88285efd5\") " pod="openstack/glance-default-external-api-0" Dec 03 09:11:56 crc kubenswrapper[4576]: I1203 09:11:56.544639 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c99775a3-39b9-42bf-8193-1fc88285efd5-config-data\") pod \"glance-default-external-api-0\" (UID: \"c99775a3-39b9-42bf-8193-1fc88285efd5\") " pod="openstack/glance-default-external-api-0" Dec 03 09:11:56 crc kubenswrapper[4576]: I1203 09:11:56.545412 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c99775a3-39b9-42bf-8193-1fc88285efd5-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c99775a3-39b9-42bf-8193-1fc88285efd5\") " pod="openstack/glance-default-external-api-0" Dec 03 09:11:56 crc kubenswrapper[4576]: I1203 09:11:56.553098 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c99775a3-39b9-42bf-8193-1fc88285efd5-scripts\") pod \"glance-default-external-api-0\" (UID: \"c99775a3-39b9-42bf-8193-1fc88285efd5\") " pod="openstack/glance-default-external-api-0" Dec 03 09:11:56 crc kubenswrapper[4576]: I1203 09:11:56.554121 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vqp8\" (UniqueName: \"kubernetes.io/projected/c99775a3-39b9-42bf-8193-1fc88285efd5-kube-api-access-4vqp8\") pod \"glance-default-external-api-0\" (UID: \"c99775a3-39b9-42bf-8193-1fc88285efd5\") " pod="openstack/glance-default-external-api-0" Dec 03 09:11:56 crc kubenswrapper[4576]: I1203 09:11:56.577785 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"c99775a3-39b9-42bf-8193-1fc88285efd5\") " pod="openstack/glance-default-external-api-0" Dec 03 09:11:56 crc kubenswrapper[4576]: I1203 09:11:56.646725 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 09:11:57 crc kubenswrapper[4576]: I1203 09:11:57.032960 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 09:11:57 crc kubenswrapper[4576]: I1203 09:11:57.034315 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 09:11:57 crc kubenswrapper[4576]: I1203 09:11:57.037407 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 03 09:11:57 crc kubenswrapper[4576]: I1203 09:11:57.055676 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 09:11:57 crc kubenswrapper[4576]: I1203 09:11:57.148803 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2d3c310e-5b57-4fe1-a940-df0f6faedfc1-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"2d3c310e-5b57-4fe1-a940-df0f6faedfc1\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:11:57 crc kubenswrapper[4576]: I1203 09:11:57.148859 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99vhf\" (UniqueName: \"kubernetes.io/projected/2d3c310e-5b57-4fe1-a940-df0f6faedfc1-kube-api-access-99vhf\") pod \"glance-default-internal-api-0\" (UID: \"2d3c310e-5b57-4fe1-a940-df0f6faedfc1\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:11:57 crc kubenswrapper[4576]: I1203 09:11:57.148885 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d3c310e-5b57-4fe1-a940-df0f6faedfc1-scripts\") pod \"glance-default-internal-api-0\" (UID: \"2d3c310e-5b57-4fe1-a940-df0f6faedfc1\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:11:57 crc kubenswrapper[4576]: I1203 09:11:57.149334 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d3c310e-5b57-4fe1-a940-df0f6faedfc1-config-data\") pod \"glance-default-internal-api-0\" (UID: \"2d3c310e-5b57-4fe1-a940-df0f6faedfc1\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:11:57 crc kubenswrapper[4576]: I1203 09:11:57.149383 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d3c310e-5b57-4fe1-a940-df0f6faedfc1-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"2d3c310e-5b57-4fe1-a940-df0f6faedfc1\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:11:57 crc kubenswrapper[4576]: I1203 09:11:57.149474 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"2d3c310e-5b57-4fe1-a940-df0f6faedfc1\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:11:57 crc kubenswrapper[4576]: I1203 09:11:57.149552 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d3c310e-5b57-4fe1-a940-df0f6faedfc1-logs\") pod \"glance-default-internal-api-0\" (UID: \"2d3c310e-5b57-4fe1-a940-df0f6faedfc1\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:11:57 crc kubenswrapper[4576]: I1203 09:11:57.252212 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d3c310e-5b57-4fe1-a940-df0f6faedfc1-scripts\") pod \"glance-default-internal-api-0\" (UID: \"2d3c310e-5b57-4fe1-a940-df0f6faedfc1\") " 
pod="openstack/glance-default-internal-api-0" Dec 03 09:11:57 crc kubenswrapper[4576]: I1203 09:11:57.252861 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d3c310e-5b57-4fe1-a940-df0f6faedfc1-config-data\") pod \"glance-default-internal-api-0\" (UID: \"2d3c310e-5b57-4fe1-a940-df0f6faedfc1\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:11:57 crc kubenswrapper[4576]: I1203 09:11:57.253798 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d3c310e-5b57-4fe1-a940-df0f6faedfc1-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"2d3c310e-5b57-4fe1-a940-df0f6faedfc1\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:11:57 crc kubenswrapper[4576]: I1203 09:11:57.253887 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"2d3c310e-5b57-4fe1-a940-df0f6faedfc1\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:11:57 crc kubenswrapper[4576]: I1203 09:11:57.253930 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d3c310e-5b57-4fe1-a940-df0f6faedfc1-logs\") pod \"glance-default-internal-api-0\" (UID: \"2d3c310e-5b57-4fe1-a940-df0f6faedfc1\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:11:57 crc kubenswrapper[4576]: I1203 09:11:57.253980 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2d3c310e-5b57-4fe1-a940-df0f6faedfc1-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"2d3c310e-5b57-4fe1-a940-df0f6faedfc1\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:11:57 crc kubenswrapper[4576]: I1203 09:11:57.254012 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-99vhf\" (UniqueName: \"kubernetes.io/projected/2d3c310e-5b57-4fe1-a940-df0f6faedfc1-kube-api-access-99vhf\") pod \"glance-default-internal-api-0\" (UID: \"2d3c310e-5b57-4fe1-a940-df0f6faedfc1\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:11:57 crc kubenswrapper[4576]: I1203 09:11:57.254197 4576 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"2d3c310e-5b57-4fe1-a940-df0f6faedfc1\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-internal-api-0" Dec 03 09:11:57 crc kubenswrapper[4576]: I1203 09:11:57.254845 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d3c310e-5b57-4fe1-a940-df0f6faedfc1-logs\") pod \"glance-default-internal-api-0\" (UID: \"2d3c310e-5b57-4fe1-a940-df0f6faedfc1\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:11:57 crc kubenswrapper[4576]: I1203 09:11:57.257564 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2d3c310e-5b57-4fe1-a940-df0f6faedfc1-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"2d3c310e-5b57-4fe1-a940-df0f6faedfc1\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:11:57 crc kubenswrapper[4576]: I1203 09:11:57.261827 
4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d3c310e-5b57-4fe1-a940-df0f6faedfc1-scripts\") pod \"glance-default-internal-api-0\" (UID: \"2d3c310e-5b57-4fe1-a940-df0f6faedfc1\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:11:57 crc kubenswrapper[4576]: I1203 09:11:57.273558 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d3c310e-5b57-4fe1-a940-df0f6faedfc1-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"2d3c310e-5b57-4fe1-a940-df0f6faedfc1\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:11:57 crc kubenswrapper[4576]: I1203 09:11:57.283915 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-99vhf\" (UniqueName: \"kubernetes.io/projected/2d3c310e-5b57-4fe1-a940-df0f6faedfc1-kube-api-access-99vhf\") pod \"glance-default-internal-api-0\" (UID: \"2d3c310e-5b57-4fe1-a940-df0f6faedfc1\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:11:57 crc kubenswrapper[4576]: I1203 09:11:57.298126 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d3c310e-5b57-4fe1-a940-df0f6faedfc1-config-data\") pod \"glance-default-internal-api-0\" (UID: \"2d3c310e-5b57-4fe1-a940-df0f6faedfc1\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:11:57 crc kubenswrapper[4576]: I1203 09:11:57.298311 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"2d3c310e-5b57-4fe1-a940-df0f6faedfc1\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:11:57 crc kubenswrapper[4576]: I1203 09:11:57.411673 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 09:11:58 crc kubenswrapper[4576]: I1203 09:11:58.517553 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 09:11:58 crc kubenswrapper[4576]: I1203 09:11:58.811440 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 09:12:00 crc kubenswrapper[4576]: I1203 09:12:00.879274 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5c79d794d7-7rqjc" podUID="0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.132:5353: i/o timeout" Dec 03 09:12:00 crc kubenswrapper[4576]: I1203 09:12:00.879997 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c79d794d7-7rqjc" Dec 03 09:12:04 crc kubenswrapper[4576]: E1203 09:12:04.108049 4576 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-placement-api:current-podified" Dec 03 09:12:04 crc kubenswrapper[4576]: E1203 09:12:04.108874 4576 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:placement-db-sync,Image:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/placement,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:placement-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-q2phz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42482,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-db-sync-2njh4_openstack(cdef871a-e0bf-42eb-b9d5-bcf0777fbec4): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" 
logger="UnhandledError" Dec 03 09:12:04 crc kubenswrapper[4576]: E1203 09:12:04.110204 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/placement-db-sync-2njh4" podUID="cdef871a-e0bf-42eb-b9d5-bcf0777fbec4" Dec 03 09:12:04 crc kubenswrapper[4576]: I1203 09:12:04.134688 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5c8794b7bf-jdk7k" Dec 03 09:12:04 crc kubenswrapper[4576]: I1203 09:12:04.177072 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2bvk8\" (UniqueName: \"kubernetes.io/projected/14e9436f-f375-45db-9619-2c3bd879fe07-kube-api-access-2bvk8\") pod \"14e9436f-f375-45db-9619-2c3bd879fe07\" (UID: \"14e9436f-f375-45db-9619-2c3bd879fe07\") " Dec 03 09:12:04 crc kubenswrapper[4576]: I1203 09:12:04.177153 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14e9436f-f375-45db-9619-2c3bd879fe07-logs\") pod \"14e9436f-f375-45db-9619-2c3bd879fe07\" (UID: \"14e9436f-f375-45db-9619-2c3bd879fe07\") " Dec 03 09:12:04 crc kubenswrapper[4576]: I1203 09:12:04.177188 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/14e9436f-f375-45db-9619-2c3bd879fe07-scripts\") pod \"14e9436f-f375-45db-9619-2c3bd879fe07\" (UID: \"14e9436f-f375-45db-9619-2c3bd879fe07\") " Dec 03 09:12:04 crc kubenswrapper[4576]: I1203 09:12:04.177223 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/14e9436f-f375-45db-9619-2c3bd879fe07-config-data\") pod \"14e9436f-f375-45db-9619-2c3bd879fe07\" (UID: \"14e9436f-f375-45db-9619-2c3bd879fe07\") " Dec 03 09:12:04 crc kubenswrapper[4576]: I1203 09:12:04.177262 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/14e9436f-f375-45db-9619-2c3bd879fe07-horizon-secret-key\") pod \"14e9436f-f375-45db-9619-2c3bd879fe07\" (UID: \"14e9436f-f375-45db-9619-2c3bd879fe07\") " Dec 03 09:12:04 crc kubenswrapper[4576]: I1203 09:12:04.179804 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/14e9436f-f375-45db-9619-2c3bd879fe07-logs" (OuterVolumeSpecName: "logs") pod "14e9436f-f375-45db-9619-2c3bd879fe07" (UID: "14e9436f-f375-45db-9619-2c3bd879fe07"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:12:04 crc kubenswrapper[4576]: I1203 09:12:04.180408 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/14e9436f-f375-45db-9619-2c3bd879fe07-scripts" (OuterVolumeSpecName: "scripts") pod "14e9436f-f375-45db-9619-2c3bd879fe07" (UID: "14e9436f-f375-45db-9619-2c3bd879fe07"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:12:04 crc kubenswrapper[4576]: I1203 09:12:04.182917 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/14e9436f-f375-45db-9619-2c3bd879fe07-config-data" (OuterVolumeSpecName: "config-data") pod "14e9436f-f375-45db-9619-2c3bd879fe07" (UID: "14e9436f-f375-45db-9619-2c3bd879fe07"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:12:04 crc kubenswrapper[4576]: I1203 09:12:04.192879 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14e9436f-f375-45db-9619-2c3bd879fe07-kube-api-access-2bvk8" (OuterVolumeSpecName: "kube-api-access-2bvk8") pod "14e9436f-f375-45db-9619-2c3bd879fe07" (UID: "14e9436f-f375-45db-9619-2c3bd879fe07"). InnerVolumeSpecName "kube-api-access-2bvk8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:12:04 crc kubenswrapper[4576]: I1203 09:12:04.205684 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14e9436f-f375-45db-9619-2c3bd879fe07-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "14e9436f-f375-45db-9619-2c3bd879fe07" (UID: "14e9436f-f375-45db-9619-2c3bd879fe07"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:12:04 crc kubenswrapper[4576]: I1203 09:12:04.281883 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2bvk8\" (UniqueName: \"kubernetes.io/projected/14e9436f-f375-45db-9619-2c3bd879fe07-kube-api-access-2bvk8\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:04 crc kubenswrapper[4576]: I1203 09:12:04.281919 4576 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14e9436f-f375-45db-9619-2c3bd879fe07-logs\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:04 crc kubenswrapper[4576]: I1203 09:12:04.281931 4576 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/14e9436f-f375-45db-9619-2c3bd879fe07-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:04 crc kubenswrapper[4576]: I1203 09:12:04.281940 4576 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/14e9436f-f375-45db-9619-2c3bd879fe07-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:04 crc kubenswrapper[4576]: I1203 09:12:04.281950 4576 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/14e9436f-f375-45db-9619-2c3bd879fe07-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:04 crc kubenswrapper[4576]: I1203 09:12:04.319641 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5c8794b7bf-jdk7k" Dec 03 09:12:04 crc kubenswrapper[4576]: I1203 09:12:04.319646 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5c8794b7bf-jdk7k" event={"ID":"14e9436f-f375-45db-9619-2c3bd879fe07","Type":"ContainerDied","Data":"53ac56f2e6a7b4b1183d3711a7931205f0f6ada5c01447edde34e231baf2fa73"} Dec 03 09:12:04 crc kubenswrapper[4576]: E1203 09:12:04.321451 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-placement-api:current-podified\\\"\"" pod="openstack/placement-db-sync-2njh4" podUID="cdef871a-e0bf-42eb-b9d5-bcf0777fbec4" Dec 03 09:12:04 crc kubenswrapper[4576]: I1203 09:12:04.418808 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5c8794b7bf-jdk7k"] Dec 03 09:12:04 crc kubenswrapper[4576]: I1203 09:12:04.431430 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-5c8794b7bf-jdk7k"] Dec 03 09:12:05 crc kubenswrapper[4576]: I1203 09:12:05.700776 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14e9436f-f375-45db-9619-2c3bd879fe07" path="/var/lib/kubelet/pods/14e9436f-f375-45db-9619-2c3bd879fe07/volumes" Dec 03 09:12:05 crc kubenswrapper[4576]: I1203 09:12:05.880648 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5c79d794d7-7rqjc" podUID="0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.132:5353: i/o timeout" Dec 03 09:12:06 crc kubenswrapper[4576]: E1203 09:12:06.718383 4576 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Dec 03 09:12:06 crc kubenswrapper[4576]: E1203 09:12:06.719379 4576 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nf7h66bh568hcbh7hbbh5f9h5d4h55bh5f7h564h699h574hb4h565h695h5fh6ch5fh567hf8h587h9ch595h585hc5h4h5d9h58bhbbh565h5bcq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bxk8d,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-79d544fc65-l8nxr_openstack(dd7fc7a1-d409-4c09-97a6-dd063b1e7647): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 09:12:06 crc kubenswrapper[4576]: E1203 09:12:06.721627 4576 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Dec 03 09:12:06 crc kubenswrapper[4576]: E1203 09:12:06.721779 4576 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5c8h5c4h95h5c7h5f8h648h5b7h55ch645h77h658h5c6h5dfh9dh64dh5bfhf8h666h56ch9h694hf8h647h665h647h54dhbh575h64h98h598hbbq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zwc9s,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-574f45d7f7-l8jvd_openstack(b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 09:12:06 crc kubenswrapper[4576]: E1203 09:12:06.721945 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-79d544fc65-l8nxr" podUID="dd7fc7a1-d409-4c09-97a6-dd063b1e7647" Dec 03 09:12:06 crc kubenswrapper[4576]: E1203 09:12:06.723939 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-574f45d7f7-l8jvd" podUID="b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88" Dec 03 09:12:10 crc kubenswrapper[4576]: I1203 09:12:10.881770 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5c79d794d7-7rqjc" podUID="0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.132:5353: i/o timeout" Dec 03 09:12:15 crc kubenswrapper[4576]: I1203 09:12:15.883495 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5c79d794d7-7rqjc" podUID="0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a" containerName="dnsmasq-dns" 
probeResult="failure" output="dial tcp 10.217.0.132:5353: i/o timeout" Dec 03 09:12:20 crc kubenswrapper[4576]: I1203 09:12:20.884737 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5c79d794d7-7rqjc" podUID="0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.132:5353: i/o timeout" Dec 03 09:12:25 crc kubenswrapper[4576]: I1203 09:12:25.671117 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c79d794d7-7rqjc" Dec 03 09:12:25 crc kubenswrapper[4576]: I1203 09:12:25.789338 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a-ovsdbserver-nb\") pod \"0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a\" (UID: \"0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a\") " Dec 03 09:12:25 crc kubenswrapper[4576]: I1203 09:12:25.789400 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a-dns-svc\") pod \"0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a\" (UID: \"0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a\") " Dec 03 09:12:25 crc kubenswrapper[4576]: I1203 09:12:25.789453 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4zwrq\" (UniqueName: \"kubernetes.io/projected/0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a-kube-api-access-4zwrq\") pod \"0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a\" (UID: \"0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a\") " Dec 03 09:12:25 crc kubenswrapper[4576]: I1203 09:12:25.789482 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a-ovsdbserver-sb\") pod \"0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a\" (UID: \"0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a\") " Dec 03 09:12:25 crc kubenswrapper[4576]: I1203 09:12:25.789561 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a-dns-swift-storage-0\") pod \"0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a\" (UID: \"0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a\") " Dec 03 09:12:25 crc kubenswrapper[4576]: I1203 09:12:25.789586 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a-config\") pod \"0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a\" (UID: \"0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a\") " Dec 03 09:12:25 crc kubenswrapper[4576]: I1203 09:12:25.803214 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a-kube-api-access-4zwrq" (OuterVolumeSpecName: "kube-api-access-4zwrq") pod "0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a" (UID: "0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a"). InnerVolumeSpecName "kube-api-access-4zwrq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:12:25 crc kubenswrapper[4576]: I1203 09:12:25.843325 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a-config" (OuterVolumeSpecName: "config") pod "0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a" (UID: "0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:12:25 crc kubenswrapper[4576]: I1203 09:12:25.846503 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a" (UID: "0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:12:25 crc kubenswrapper[4576]: I1203 09:12:25.850070 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a" (UID: "0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:12:25 crc kubenswrapper[4576]: I1203 09:12:25.853781 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a" (UID: "0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:12:25 crc kubenswrapper[4576]: I1203 09:12:25.863338 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a" (UID: "0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:12:25 crc kubenswrapper[4576]: I1203 09:12:25.892557 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5c79d794d7-7rqjc" podUID="0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.132:5353: i/o timeout" Dec 03 09:12:25 crc kubenswrapper[4576]: I1203 09:12:25.893425 4576 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:25 crc kubenswrapper[4576]: I1203 09:12:25.893448 4576 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:25 crc kubenswrapper[4576]: I1203 09:12:25.893458 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4zwrq\" (UniqueName: \"kubernetes.io/projected/0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a-kube-api-access-4zwrq\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:25 crc kubenswrapper[4576]: I1203 09:12:25.893466 4576 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:25 crc kubenswrapper[4576]: I1203 09:12:25.893476 4576 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a-config\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:25 crc kubenswrapper[4576]: I1203 09:12:25.893484 4576 reconciler_common.go:293] "Volume 
detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:26 crc kubenswrapper[4576]: I1203 09:12:26.536897 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c79d794d7-7rqjc" event={"ID":"0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a","Type":"ContainerDied","Data":"5b0ff2bb78f0276a3c21b86327d38255149c4bb42fe7371cb24d771dd97f9172"} Dec 03 09:12:26 crc kubenswrapper[4576]: I1203 09:12:26.536977 4576 scope.go:117] "RemoveContainer" containerID="39d00c03f46805cb84da9b3ae3c2f088c9132fc8021fab9985261cb56df0dab8" Dec 03 09:12:26 crc kubenswrapper[4576]: I1203 09:12:26.536928 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c79d794d7-7rqjc" Dec 03 09:12:26 crc kubenswrapper[4576]: E1203 09:12:26.546079 4576 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified" Dec 03 09:12:26 crc kubenswrapper[4576]: E1203 09:12:26.546271 4576 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nfqn6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-988np_openstack(437286d3-1147-43ef-945f-8612d1610427): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 09:12:26 crc kubenswrapper[4576]: E1203 09:12:26.548017 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-988np" podUID="437286d3-1147-43ef-945f-8612d1610427" Dec 03 09:12:26 crc kubenswrapper[4576]: I1203 09:12:26.588675 4576 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openstack/dnsmasq-dns-5c79d794d7-7rqjc"] Dec 03 09:12:26 crc kubenswrapper[4576]: I1203 09:12:26.595246 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c79d794d7-7rqjc"] Dec 03 09:12:26 crc kubenswrapper[4576]: I1203 09:12:26.694310 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-79d544fc65-l8nxr" Dec 03 09:12:26 crc kubenswrapper[4576]: I1203 09:12:26.718691 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-574f45d7f7-l8jvd" Dec 03 09:12:26 crc kubenswrapper[4576]: I1203 09:12:26.857152 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88-logs\") pod \"b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88\" (UID: \"b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88\") " Dec 03 09:12:26 crc kubenswrapper[4576]: I1203 09:12:26.857310 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88-scripts\") pod \"b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88\" (UID: \"b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88\") " Dec 03 09:12:26 crc kubenswrapper[4576]: I1203 09:12:26.857340 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/dd7fc7a1-d409-4c09-97a6-dd063b1e7647-config-data\") pod \"dd7fc7a1-d409-4c09-97a6-dd063b1e7647\" (UID: \"dd7fc7a1-d409-4c09-97a6-dd063b1e7647\") " Dec 03 09:12:26 crc kubenswrapper[4576]: I1203 09:12:26.857428 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dd7fc7a1-d409-4c09-97a6-dd063b1e7647-logs\") pod \"dd7fc7a1-d409-4c09-97a6-dd063b1e7647\" (UID: \"dd7fc7a1-d409-4c09-97a6-dd063b1e7647\") " Dec 03 09:12:26 crc kubenswrapper[4576]: I1203 09:12:26.857462 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/dd7fc7a1-d409-4c09-97a6-dd063b1e7647-horizon-secret-key\") pod \"dd7fc7a1-d409-4c09-97a6-dd063b1e7647\" (UID: \"dd7fc7a1-d409-4c09-97a6-dd063b1e7647\") " Dec 03 09:12:26 crc kubenswrapper[4576]: I1203 09:12:26.857506 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zwc9s\" (UniqueName: \"kubernetes.io/projected/b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88-kube-api-access-zwc9s\") pod \"b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88\" (UID: \"b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88\") " Dec 03 09:12:26 crc kubenswrapper[4576]: I1203 09:12:26.857582 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dd7fc7a1-d409-4c09-97a6-dd063b1e7647-scripts\") pod \"dd7fc7a1-d409-4c09-97a6-dd063b1e7647\" (UID: \"dd7fc7a1-d409-4c09-97a6-dd063b1e7647\") " Dec 03 09:12:26 crc kubenswrapper[4576]: I1203 09:12:26.857621 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88-config-data\") pod \"b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88\" (UID: \"b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88\") " Dec 03 09:12:26 crc kubenswrapper[4576]: I1203 09:12:26.857655 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bxk8d\" (UniqueName: 
\"kubernetes.io/projected/dd7fc7a1-d409-4c09-97a6-dd063b1e7647-kube-api-access-bxk8d\") pod \"dd7fc7a1-d409-4c09-97a6-dd063b1e7647\" (UID: \"dd7fc7a1-d409-4c09-97a6-dd063b1e7647\") " Dec 03 09:12:26 crc kubenswrapper[4576]: I1203 09:12:26.857685 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88-horizon-secret-key\") pod \"b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88\" (UID: \"b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88\") " Dec 03 09:12:26 crc kubenswrapper[4576]: I1203 09:12:26.858065 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd7fc7a1-d409-4c09-97a6-dd063b1e7647-logs" (OuterVolumeSpecName: "logs") pod "dd7fc7a1-d409-4c09-97a6-dd063b1e7647" (UID: "dd7fc7a1-d409-4c09-97a6-dd063b1e7647"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:12:26 crc kubenswrapper[4576]: I1203 09:12:26.858505 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd7fc7a1-d409-4c09-97a6-dd063b1e7647-scripts" (OuterVolumeSpecName: "scripts") pod "dd7fc7a1-d409-4c09-97a6-dd063b1e7647" (UID: "dd7fc7a1-d409-4c09-97a6-dd063b1e7647"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:12:26 crc kubenswrapper[4576]: I1203 09:12:26.858993 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88-logs" (OuterVolumeSpecName: "logs") pod "b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88" (UID: "b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:12:26 crc kubenswrapper[4576]: I1203 09:12:26.859051 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88-scripts" (OuterVolumeSpecName: "scripts") pod "b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88" (UID: "b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:12:26 crc kubenswrapper[4576]: I1203 09:12:26.859591 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd7fc7a1-d409-4c09-97a6-dd063b1e7647-config-data" (OuterVolumeSpecName: "config-data") pod "dd7fc7a1-d409-4c09-97a6-dd063b1e7647" (UID: "dd7fc7a1-d409-4c09-97a6-dd063b1e7647"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:12:26 crc kubenswrapper[4576]: I1203 09:12:26.860164 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88-config-data" (OuterVolumeSpecName: "config-data") pod "b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88" (UID: "b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:12:26 crc kubenswrapper[4576]: I1203 09:12:26.863317 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd7fc7a1-d409-4c09-97a6-dd063b1e7647-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "dd7fc7a1-d409-4c09-97a6-dd063b1e7647" (UID: "dd7fc7a1-d409-4c09-97a6-dd063b1e7647"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:12:26 crc kubenswrapper[4576]: I1203 09:12:26.863431 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88-kube-api-access-zwc9s" (OuterVolumeSpecName: "kube-api-access-zwc9s") pod "b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88" (UID: "b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88"). InnerVolumeSpecName "kube-api-access-zwc9s". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:12:26 crc kubenswrapper[4576]: I1203 09:12:26.864092 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd7fc7a1-d409-4c09-97a6-dd063b1e7647-kube-api-access-bxk8d" (OuterVolumeSpecName: "kube-api-access-bxk8d") pod "dd7fc7a1-d409-4c09-97a6-dd063b1e7647" (UID: "dd7fc7a1-d409-4c09-97a6-dd063b1e7647"). InnerVolumeSpecName "kube-api-access-bxk8d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:12:26 crc kubenswrapper[4576]: I1203 09:12:26.877622 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88" (UID: "b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:12:26 crc kubenswrapper[4576]: I1203 09:12:26.959342 4576 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/dd7fc7a1-d409-4c09-97a6-dd063b1e7647-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:26 crc kubenswrapper[4576]: I1203 09:12:26.959402 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zwc9s\" (UniqueName: \"kubernetes.io/projected/b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88-kube-api-access-zwc9s\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:26 crc kubenswrapper[4576]: I1203 09:12:26.959418 4576 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dd7fc7a1-d409-4c09-97a6-dd063b1e7647-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:26 crc kubenswrapper[4576]: I1203 09:12:26.959430 4576 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:26 crc kubenswrapper[4576]: I1203 09:12:26.959442 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bxk8d\" (UniqueName: \"kubernetes.io/projected/dd7fc7a1-d409-4c09-97a6-dd063b1e7647-kube-api-access-bxk8d\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:26 crc kubenswrapper[4576]: I1203 09:12:26.959452 4576 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:26 crc kubenswrapper[4576]: I1203 09:12:26.959464 4576 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88-logs\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:26 crc kubenswrapper[4576]: I1203 09:12:26.959474 4576 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88-scripts\") on node \"crc\" DevicePath \"\"" 
Dec 03 09:12:26 crc kubenswrapper[4576]: I1203 09:12:26.959484 4576 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/dd7fc7a1-d409-4c09-97a6-dd063b1e7647-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:26 crc kubenswrapper[4576]: I1203 09:12:26.959493 4576 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dd7fc7a1-d409-4c09-97a6-dd063b1e7647-logs\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:27 crc kubenswrapper[4576]: I1203 09:12:27.547357 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-79d544fc65-l8nxr" event={"ID":"dd7fc7a1-d409-4c09-97a6-dd063b1e7647","Type":"ContainerDied","Data":"779b4f4a0cdf463feac1dc479b4bf98ed2e242f99e8c3df8f5186ed9ed69067f"} Dec 03 09:12:27 crc kubenswrapper[4576]: I1203 09:12:27.547456 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-79d544fc65-l8nxr" Dec 03 09:12:27 crc kubenswrapper[4576]: I1203 09:12:27.551408 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-574f45d7f7-l8jvd" Dec 03 09:12:27 crc kubenswrapper[4576]: I1203 09:12:27.551443 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-574f45d7f7-l8jvd" event={"ID":"b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88","Type":"ContainerDied","Data":"3509987a2a85dfddefacd3a91b99730291a92bbaf17b23e695df8bf541c3b2d8"} Dec 03 09:12:27 crc kubenswrapper[4576]: E1203 09:12:27.552995 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-988np" podUID="437286d3-1147-43ef-945f-8612d1610427" Dec 03 09:12:27 crc kubenswrapper[4576]: I1203 09:12:27.621236 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-79d544fc65-l8nxr"] Dec 03 09:12:27 crc kubenswrapper[4576]: I1203 09:12:27.627391 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-79d544fc65-l8nxr"] Dec 03 09:12:27 crc kubenswrapper[4576]: I1203 09:12:27.714842 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a" path="/var/lib/kubelet/pods/0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a/volumes" Dec 03 09:12:27 crc kubenswrapper[4576]: I1203 09:12:27.715553 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd7fc7a1-d409-4c09-97a6-dd063b1e7647" path="/var/lib/kubelet/pods/dd7fc7a1-d409-4c09-97a6-dd063b1e7647/volumes" Dec 03 09:12:27 crc kubenswrapper[4576]: I1203 09:12:27.715908 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-574f45d7f7-l8jvd"] Dec 03 09:12:27 crc kubenswrapper[4576]: I1203 09:12:27.730701 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-574f45d7f7-l8jvd"] Dec 03 09:12:28 crc kubenswrapper[4576]: E1203 09:12:28.383924 4576 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Dec 03 09:12:28 crc kubenswrapper[4576]: E1203 09:12:28.384377 4576 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-h9njg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-qqg9g_openstack(047a7c95-c4e3-46fa-8b1b-2a351992493e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 09:12:28 crc kubenswrapper[4576]: E1203 09:12:28.385868 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-qqg9g" podUID="047a7c95-c4e3-46fa-8b1b-2a351992493e" Dec 03 09:12:28 crc kubenswrapper[4576]: I1203 09:12:28.496331 4576 scope.go:117] "RemoveContainer" containerID="b0b2afca975767a6ed374848a7c36f2a562362953db74c5b6766f2ffcec73b4b" Dec 03 09:12:28 crc kubenswrapper[4576]: E1203 09:12:28.636219 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-qqg9g" podUID="047a7c95-c4e3-46fa-8b1b-2a351992493e" Dec 03 09:12:29 crc kubenswrapper[4576]: I1203 09:12:29.132939 4576 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 09:12:29 crc kubenswrapper[4576]: I1203 09:12:29.219196 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5d9b9454d4-cbqlk"] Dec 03 09:12:29 crc kubenswrapper[4576]: I1203 09:12:29.316949 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6d649695d8-6rtxn"] Dec 03 09:12:29 crc kubenswrapper[4576]: I1203 09:12:29.337823 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-w66xd"] Dec 03 09:12:29 crc kubenswrapper[4576]: I1203 09:12:29.375083 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-dl696"] Dec 03 09:12:29 crc kubenswrapper[4576]: I1203 09:12:29.490417 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 09:12:29 crc kubenswrapper[4576]: I1203 09:12:29.651063 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b5fe3155-d737-4a7e-9596-a6080cae4b27","Type":"ContainerStarted","Data":"e664fb6cd75f081387fcb211ce9d76ee8303303cfcff773bb2e57ddfe65e3642"} Dec 03 09:12:29 crc kubenswrapper[4576]: I1203 09:12:29.655814 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"2d3c310e-5b57-4fe1-a940-df0f6faedfc1","Type":"ContainerStarted","Data":"20439cbb4437596047ae1d1d54b225685aed94c6f506472fc29abbddb516cd5f"} Dec 03 09:12:29 crc kubenswrapper[4576]: I1203 09:12:29.660329 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-dl696" event={"ID":"3ca98b4d-d895-4456-81ae-ea8c0d5f59d2","Type":"ContainerStarted","Data":"ae26bdc5822d38bbd80e3c4408db0cebe0ca005518dd0e6970f1bfd22945ca15"} Dec 03 09:12:29 crc kubenswrapper[4576]: I1203 09:12:29.664058 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-w66xd" event={"ID":"bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07","Type":"ContainerStarted","Data":"8d81040bc44e1d98fb0fff5092b87f2a3e891af8ad873a8d8eebede777c59493"} Dec 03 09:12:29 crc kubenswrapper[4576]: I1203 09:12:29.709191 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88" path="/var/lib/kubelet/pods/b9d376fd-9c92-4ed3-8f0a-bcd2f60ccb88/volumes" Dec 03 09:12:29 crc kubenswrapper[4576]: I1203 09:12:29.709643 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-2njh4" event={"ID":"cdef871a-e0bf-42eb-b9d5-bcf0777fbec4","Type":"ContainerStarted","Data":"1c9aba1e8d982524aae075138b9deb785c987072ab1406b17dab3ab5d8338280"} Dec 03 09:12:29 crc kubenswrapper[4576]: I1203 09:12:29.709682 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5d9b9454d4-cbqlk" event={"ID":"83ab6db2-7b9e-4161-a064-56fe67986825","Type":"ContainerStarted","Data":"ed9cf7cd116e078e6c51b9dfe988c73ac1d75e4edbaa78fda3c5bb47f277172d"} Dec 03 09:12:29 crc kubenswrapper[4576]: I1203 09:12:29.724823 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c99775a3-39b9-42bf-8193-1fc88285efd5","Type":"ContainerStarted","Data":"e51a405da7e87ca55e1870900eb57e11afd325b3d7ffea922cae69e8b636fb72"} Dec 03 09:12:29 crc kubenswrapper[4576]: I1203 09:12:29.729664 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6d649695d8-6rtxn" 
event={"ID":"288ed488-5270-4966-b866-f9f015262989","Type":"ContainerStarted","Data":"90511927957d8d98837f19b1613309d47649e52bf77760c61a9aa8b99d245e4c"} Dec 03 09:12:29 crc kubenswrapper[4576]: I1203 09:12:29.747731 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-2njh4" podStartSLOduration=3.103394084 podStartE2EDuration="57.747711415s" podCreationTimestamp="2025-12-03 09:11:32 +0000 UTC" firstStartedPulling="2025-12-03 09:11:33.852182776 +0000 UTC m=+1901.238159760" lastFinishedPulling="2025-12-03 09:12:28.496500107 +0000 UTC m=+1955.882477091" observedRunningTime="2025-12-03 09:12:29.733078016 +0000 UTC m=+1957.119055030" watchObservedRunningTime="2025-12-03 09:12:29.747711415 +0000 UTC m=+1957.133688399" Dec 03 09:12:30 crc kubenswrapper[4576]: I1203 09:12:30.746248 4576 generic.go:334] "Generic (PLEG): container finished" podID="3ca98b4d-d895-4456-81ae-ea8c0d5f59d2" containerID="91903483448a43d2d3bbebe8ba153627d8cf9b45b44c888db9f454047ae87870" exitCode=0 Dec 03 09:12:30 crc kubenswrapper[4576]: I1203 09:12:30.746311 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-dl696" event={"ID":"3ca98b4d-d895-4456-81ae-ea8c0d5f59d2","Type":"ContainerDied","Data":"91903483448a43d2d3bbebe8ba153627d8cf9b45b44c888db9f454047ae87870"} Dec 03 09:12:30 crc kubenswrapper[4576]: I1203 09:12:30.752016 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-w66xd" event={"ID":"bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07","Type":"ContainerStarted","Data":"d1d90449a4995f5ce23629369d06b9e4e6817aa4c307f6795c657b448f4718da"} Dec 03 09:12:30 crc kubenswrapper[4576]: I1203 09:12:30.758864 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5d9b9454d4-cbqlk" event={"ID":"83ab6db2-7b9e-4161-a064-56fe67986825","Type":"ContainerStarted","Data":"de540cc70f90a90805d6634416160cae0d2a1d21d13f2dece12fc7df31fde65a"} Dec 03 09:12:30 crc kubenswrapper[4576]: I1203 09:12:30.769757 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c99775a3-39b9-42bf-8193-1fc88285efd5","Type":"ContainerStarted","Data":"0e93af1f829fabfd25fb5508749b97f93a24946a9aad4f32ce7ce367425a0bee"} Dec 03 09:12:30 crc kubenswrapper[4576]: I1203 09:12:30.789124 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6d649695d8-6rtxn" event={"ID":"288ed488-5270-4966-b866-f9f015262989","Type":"ContainerStarted","Data":"9f5ff905e527c9bc0492d99085b73d8e771b4c6d9cdf3697f05e3ac1c5fcf4e2"} Dec 03 09:12:30 crc kubenswrapper[4576]: I1203 09:12:30.791721 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"2d3c310e-5b57-4fe1-a940-df0f6faedfc1","Type":"ContainerStarted","Data":"7d0b7cd27e003f421aed9f9bd052f518c8725957e7d74fc0d06f8eec991dd0ff"} Dec 03 09:12:30 crc kubenswrapper[4576]: I1203 09:12:30.798210 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-w66xd" podStartSLOduration=35.798186625 podStartE2EDuration="35.798186625s" podCreationTimestamp="2025-12-03 09:11:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:12:30.794940756 +0000 UTC m=+1958.180917740" watchObservedRunningTime="2025-12-03 09:12:30.798186625 +0000 UTC m=+1958.184163609" Dec 03 09:12:31 crc kubenswrapper[4576]: I1203 09:12:31.801097 4576 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5d9b9454d4-cbqlk" event={"ID":"83ab6db2-7b9e-4161-a064-56fe67986825","Type":"ContainerStarted","Data":"ecc1d0da0e23f836b4057d594436f8f67b6ca64b352fcb8fb4eef6e69fd70084"} Dec 03 09:12:31 crc kubenswrapper[4576]: I1203 09:12:31.804708 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c99775a3-39b9-42bf-8193-1fc88285efd5","Type":"ContainerStarted","Data":"5d7cf75517fd960a5d34f639d87d800609197a424de9cb47ca7056ea2c30fb0b"} Dec 03 09:12:31 crc kubenswrapper[4576]: I1203 09:12:31.804888 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="c99775a3-39b9-42bf-8193-1fc88285efd5" containerName="glance-log" containerID="cri-o://0e93af1f829fabfd25fb5508749b97f93a24946a9aad4f32ce7ce367425a0bee" gracePeriod=30 Dec 03 09:12:31 crc kubenswrapper[4576]: I1203 09:12:31.805443 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="c99775a3-39b9-42bf-8193-1fc88285efd5" containerName="glance-httpd" containerID="cri-o://5d7cf75517fd960a5d34f639d87d800609197a424de9cb47ca7056ea2c30fb0b" gracePeriod=30 Dec 03 09:12:31 crc kubenswrapper[4576]: I1203 09:12:31.814017 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6d649695d8-6rtxn" event={"ID":"288ed488-5270-4966-b866-f9f015262989","Type":"ContainerStarted","Data":"bfc5b586bd45522f00431a1d74aa16840b99866538d2111ba820658948c181a3"} Dec 03 09:12:32 crc kubenswrapper[4576]: I1203 09:12:32.848683 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b5fe3155-d737-4a7e-9596-a6080cae4b27","Type":"ContainerStarted","Data":"32b57303e9fb81ccf028911c977c5217cead677e6f4bbdef90a0052653a5853f"} Dec 03 09:12:32 crc kubenswrapper[4576]: I1203 09:12:32.856301 4576 generic.go:334] "Generic (PLEG): container finished" podID="c99775a3-39b9-42bf-8193-1fc88285efd5" containerID="5d7cf75517fd960a5d34f639d87d800609197a424de9cb47ca7056ea2c30fb0b" exitCode=0 Dec 03 09:12:32 crc kubenswrapper[4576]: I1203 09:12:32.856385 4576 generic.go:334] "Generic (PLEG): container finished" podID="c99775a3-39b9-42bf-8193-1fc88285efd5" containerID="0e93af1f829fabfd25fb5508749b97f93a24946a9aad4f32ce7ce367425a0bee" exitCode=143 Dec 03 09:12:32 crc kubenswrapper[4576]: I1203 09:12:32.856409 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c99775a3-39b9-42bf-8193-1fc88285efd5","Type":"ContainerDied","Data":"5d7cf75517fd960a5d34f639d87d800609197a424de9cb47ca7056ea2c30fb0b"} Dec 03 09:12:32 crc kubenswrapper[4576]: I1203 09:12:32.856467 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c99775a3-39b9-42bf-8193-1fc88285efd5","Type":"ContainerDied","Data":"0e93af1f829fabfd25fb5508749b97f93a24946a9aad4f32ce7ce367425a0bee"} Dec 03 09:12:32 crc kubenswrapper[4576]: I1203 09:12:32.866881 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"2d3c310e-5b57-4fe1-a940-df0f6faedfc1","Type":"ContainerStarted","Data":"6fbc96c6803055a142fc667535c37b51bd3a2b589856a093a689373ee84417b7"} Dec 03 09:12:32 crc kubenswrapper[4576]: I1203 09:12:32.867035 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" 
podUID="2d3c310e-5b57-4fe1-a940-df0f6faedfc1" containerName="glance-log" containerID="cri-o://7d0b7cd27e003f421aed9f9bd052f518c8725957e7d74fc0d06f8eec991dd0ff" gracePeriod=30 Dec 03 09:12:32 crc kubenswrapper[4576]: I1203 09:12:32.867579 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="2d3c310e-5b57-4fe1-a940-df0f6faedfc1" containerName="glance-httpd" containerID="cri-o://6fbc96c6803055a142fc667535c37b51bd3a2b589856a093a689373ee84417b7" gracePeriod=30 Dec 03 09:12:32 crc kubenswrapper[4576]: I1203 09:12:32.881331 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-dl696" event={"ID":"3ca98b4d-d895-4456-81ae-ea8c0d5f59d2","Type":"ContainerStarted","Data":"3700fddfb4a90f02b766ca62a7992ef81fcf3023ee2a1a3f6b6e632eb66beab6"} Dec 03 09:12:32 crc kubenswrapper[4576]: I1203 09:12:32.881663 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-56df8fb6b7-dl696" Dec 03 09:12:32 crc kubenswrapper[4576]: I1203 09:12:32.909901 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=37.909881847 podStartE2EDuration="37.909881847s" podCreationTimestamp="2025-12-03 09:11:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:12:32.900980385 +0000 UTC m=+1960.286957389" watchObservedRunningTime="2025-12-03 09:12:32.909881847 +0000 UTC m=+1960.295858841" Dec 03 09:12:32 crc kubenswrapper[4576]: I1203 09:12:32.917965 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=37.917943968 podStartE2EDuration="37.917943968s" podCreationTimestamp="2025-12-03 09:11:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:12:31.864136737 +0000 UTC m=+1959.250113731" watchObservedRunningTime="2025-12-03 09:12:32.917943968 +0000 UTC m=+1960.303920962" Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.096338 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-6d649695d8-6rtxn" podStartSLOduration=51.398381507 podStartE2EDuration="52.096289145s" podCreationTimestamp="2025-12-03 09:11:41 +0000 UTC" firstStartedPulling="2025-12-03 09:12:29.314893292 +0000 UTC m=+1956.700870276" lastFinishedPulling="2025-12-03 09:12:30.01280093 +0000 UTC m=+1957.398777914" observedRunningTime="2025-12-03 09:12:32.949340484 +0000 UTC m=+1960.335317468" watchObservedRunningTime="2025-12-03 09:12:33.096289145 +0000 UTC m=+1960.482266139" Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.143925 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.153716 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-56df8fb6b7-dl696" podStartSLOduration=38.153698391 podStartE2EDuration="38.153698391s" podCreationTimestamp="2025-12-03 09:11:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:12:33.109157476 +0000 UTC m=+1960.495134460" watchObservedRunningTime="2025-12-03 09:12:33.153698391 +0000 UTC m=+1960.539675375" Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.155457 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-5d9b9454d4-cbqlk" podStartSLOduration=51.478931166 podStartE2EDuration="52.155449739s" podCreationTimestamp="2025-12-03 09:11:41 +0000 UTC" firstStartedPulling="2025-12-03 09:12:29.238729774 +0000 UTC m=+1956.624706758" lastFinishedPulling="2025-12-03 09:12:29.915248347 +0000 UTC m=+1957.301225331" observedRunningTime="2025-12-03 09:12:33.150180956 +0000 UTC m=+1960.536157960" watchObservedRunningTime="2025-12-03 09:12:33.155449739 +0000 UTC m=+1960.541426723" Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.172999 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c99775a3-39b9-42bf-8193-1fc88285efd5-logs\") pod \"c99775a3-39b9-42bf-8193-1fc88285efd5\" (UID: \"c99775a3-39b9-42bf-8193-1fc88285efd5\") " Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.173048 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c99775a3-39b9-42bf-8193-1fc88285efd5-config-data\") pod \"c99775a3-39b9-42bf-8193-1fc88285efd5\" (UID: \"c99775a3-39b9-42bf-8193-1fc88285efd5\") " Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.173079 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c99775a3-39b9-42bf-8193-1fc88285efd5-scripts\") pod \"c99775a3-39b9-42bf-8193-1fc88285efd5\" (UID: \"c99775a3-39b9-42bf-8193-1fc88285efd5\") " Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.173112 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c99775a3-39b9-42bf-8193-1fc88285efd5-httpd-run\") pod \"c99775a3-39b9-42bf-8193-1fc88285efd5\" (UID: \"c99775a3-39b9-42bf-8193-1fc88285efd5\") " Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.173130 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c99775a3-39b9-42bf-8193-1fc88285efd5-combined-ca-bundle\") pod \"c99775a3-39b9-42bf-8193-1fc88285efd5\" (UID: \"c99775a3-39b9-42bf-8193-1fc88285efd5\") " Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.173175 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4vqp8\" (UniqueName: \"kubernetes.io/projected/c99775a3-39b9-42bf-8193-1fc88285efd5-kube-api-access-4vqp8\") pod \"c99775a3-39b9-42bf-8193-1fc88285efd5\" (UID: \"c99775a3-39b9-42bf-8193-1fc88285efd5\") " Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.173209 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage08-crc\") pod \"c99775a3-39b9-42bf-8193-1fc88285efd5\" (UID: \"c99775a3-39b9-42bf-8193-1fc88285efd5\") " Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.173755 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c99775a3-39b9-42bf-8193-1fc88285efd5-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "c99775a3-39b9-42bf-8193-1fc88285efd5" (UID: "c99775a3-39b9-42bf-8193-1fc88285efd5"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.208846 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance") pod "c99775a3-39b9-42bf-8193-1fc88285efd5" (UID: "c99775a3-39b9-42bf-8193-1fc88285efd5"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.209885 4576 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.209917 4576 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c99775a3-39b9-42bf-8193-1fc88285efd5-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.212594 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c99775a3-39b9-42bf-8193-1fc88285efd5-logs" (OuterVolumeSpecName: "logs") pod "c99775a3-39b9-42bf-8193-1fc88285efd5" (UID: "c99775a3-39b9-42bf-8193-1fc88285efd5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.216000 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c99775a3-39b9-42bf-8193-1fc88285efd5-kube-api-access-4vqp8" (OuterVolumeSpecName: "kube-api-access-4vqp8") pod "c99775a3-39b9-42bf-8193-1fc88285efd5" (UID: "c99775a3-39b9-42bf-8193-1fc88285efd5"). InnerVolumeSpecName "kube-api-access-4vqp8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.278957 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c99775a3-39b9-42bf-8193-1fc88285efd5-scripts" (OuterVolumeSpecName: "scripts") pod "c99775a3-39b9-42bf-8193-1fc88285efd5" (UID: "c99775a3-39b9-42bf-8193-1fc88285efd5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.298804 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c99775a3-39b9-42bf-8193-1fc88285efd5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c99775a3-39b9-42bf-8193-1fc88285efd5" (UID: "c99775a3-39b9-42bf-8193-1fc88285efd5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.299043 4576 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.312008 4576 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c99775a3-39b9-42bf-8193-1fc88285efd5-logs\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.312037 4576 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c99775a3-39b9-42bf-8193-1fc88285efd5-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.312052 4576 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c99775a3-39b9-42bf-8193-1fc88285efd5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.312061 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4vqp8\" (UniqueName: \"kubernetes.io/projected/c99775a3-39b9-42bf-8193-1fc88285efd5-kube-api-access-4vqp8\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.312070 4576 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.341122 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c99775a3-39b9-42bf-8193-1fc88285efd5-config-data" (OuterVolumeSpecName: "config-data") pod "c99775a3-39b9-42bf-8193-1fc88285efd5" (UID: "c99775a3-39b9-42bf-8193-1fc88285efd5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.413786 4576 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c99775a3-39b9-42bf-8193-1fc88285efd5-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.892638 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c99775a3-39b9-42bf-8193-1fc88285efd5","Type":"ContainerDied","Data":"e51a405da7e87ca55e1870900eb57e11afd325b3d7ffea922cae69e8b636fb72"} Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.892701 4576 scope.go:117] "RemoveContainer" containerID="5d7cf75517fd960a5d34f639d87d800609197a424de9cb47ca7056ea2c30fb0b" Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.892721 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.895665 4576 generic.go:334] "Generic (PLEG): container finished" podID="2d3c310e-5b57-4fe1-a940-df0f6faedfc1" containerID="6fbc96c6803055a142fc667535c37b51bd3a2b589856a093a689373ee84417b7" exitCode=0 Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.895695 4576 generic.go:334] "Generic (PLEG): container finished" podID="2d3c310e-5b57-4fe1-a940-df0f6faedfc1" containerID="7d0b7cd27e003f421aed9f9bd052f518c8725957e7d74fc0d06f8eec991dd0ff" exitCode=143 Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.896180 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"2d3c310e-5b57-4fe1-a940-df0f6faedfc1","Type":"ContainerDied","Data":"6fbc96c6803055a142fc667535c37b51bd3a2b589856a093a689373ee84417b7"} Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.896261 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"2d3c310e-5b57-4fe1-a940-df0f6faedfc1","Type":"ContainerDied","Data":"7d0b7cd27e003f421aed9f9bd052f518c8725957e7d74fc0d06f8eec991dd0ff"} Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.916619 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.926374 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.954960 4576 scope.go:117] "RemoveContainer" containerID="0e93af1f829fabfd25fb5508749b97f93a24946a9aad4f32ce7ce367425a0bee" Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.992345 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 09:12:33 crc kubenswrapper[4576]: E1203 09:12:33.992892 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c99775a3-39b9-42bf-8193-1fc88285efd5" containerName="glance-log" Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.992913 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="c99775a3-39b9-42bf-8193-1fc88285efd5" containerName="glance-log" Dec 03 09:12:33 crc kubenswrapper[4576]: E1203 09:12:33.992928 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a" containerName="init" Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.992936 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a" containerName="init" Dec 03 09:12:33 crc kubenswrapper[4576]: E1203 09:12:33.992957 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a" containerName="dnsmasq-dns" Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.992965 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a" containerName="dnsmasq-dns" Dec 03 09:12:33 crc kubenswrapper[4576]: E1203 09:12:33.992974 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c99775a3-39b9-42bf-8193-1fc88285efd5" containerName="glance-httpd" Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.992981 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="c99775a3-39b9-42bf-8193-1fc88285efd5" containerName="glance-httpd" Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.993174 4576 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="0d9c9fe7-e79c-4dff-a924-8dd9f4b7939a" containerName="dnsmasq-dns" Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.993191 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="c99775a3-39b9-42bf-8193-1fc88285efd5" containerName="glance-httpd" Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.993220 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="c99775a3-39b9-42bf-8193-1fc88285efd5" containerName="glance-log" Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.994409 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 09:12:33 crc kubenswrapper[4576]: I1203 09:12:33.999660 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 09:12:34 crc kubenswrapper[4576]: I1203 09:12:34.000844 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 03 09:12:34 crc kubenswrapper[4576]: I1203 09:12:34.000923 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 03 09:12:34 crc kubenswrapper[4576]: I1203 09:12:34.139494 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jfddc\" (UniqueName: \"kubernetes.io/projected/8e3d72d9-073c-46b3-826c-36b249b45fbf-kube-api-access-jfddc\") pod \"glance-default-external-api-0\" (UID: \"8e3d72d9-073c-46b3-826c-36b249b45fbf\") " pod="openstack/glance-default-external-api-0" Dec 03 09:12:34 crc kubenswrapper[4576]: I1203 09:12:34.139558 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e3d72d9-073c-46b3-826c-36b249b45fbf-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"8e3d72d9-073c-46b3-826c-36b249b45fbf\") " pod="openstack/glance-default-external-api-0" Dec 03 09:12:34 crc kubenswrapper[4576]: I1203 09:12:34.139586 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e3d72d9-073c-46b3-826c-36b249b45fbf-config-data\") pod \"glance-default-external-api-0\" (UID: \"8e3d72d9-073c-46b3-826c-36b249b45fbf\") " pod="openstack/glance-default-external-api-0" Dec 03 09:12:34 crc kubenswrapper[4576]: I1203 09:12:34.139617 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8e3d72d9-073c-46b3-826c-36b249b45fbf-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"8e3d72d9-073c-46b3-826c-36b249b45fbf\") " pod="openstack/glance-default-external-api-0" Dec 03 09:12:34 crc kubenswrapper[4576]: I1203 09:12:34.139634 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8e3d72d9-073c-46b3-826c-36b249b45fbf-scripts\") pod \"glance-default-external-api-0\" (UID: \"8e3d72d9-073c-46b3-826c-36b249b45fbf\") " pod="openstack/glance-default-external-api-0" Dec 03 09:12:34 crc kubenswrapper[4576]: I1203 09:12:34.139674 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8e3d72d9-073c-46b3-826c-36b249b45fbf-httpd-run\") pod \"glance-default-external-api-0\" (UID: 
\"8e3d72d9-073c-46b3-826c-36b249b45fbf\") " pod="openstack/glance-default-external-api-0" Dec 03 09:12:34 crc kubenswrapper[4576]: I1203 09:12:34.139751 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"8e3d72d9-073c-46b3-826c-36b249b45fbf\") " pod="openstack/glance-default-external-api-0" Dec 03 09:12:34 crc kubenswrapper[4576]: I1203 09:12:34.139777 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8e3d72d9-073c-46b3-826c-36b249b45fbf-logs\") pod \"glance-default-external-api-0\" (UID: \"8e3d72d9-073c-46b3-826c-36b249b45fbf\") " pod="openstack/glance-default-external-api-0" Dec 03 09:12:34 crc kubenswrapper[4576]: I1203 09:12:34.241004 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8e3d72d9-073c-46b3-826c-36b249b45fbf-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"8e3d72d9-073c-46b3-826c-36b249b45fbf\") " pod="openstack/glance-default-external-api-0" Dec 03 09:12:34 crc kubenswrapper[4576]: I1203 09:12:34.241050 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8e3d72d9-073c-46b3-826c-36b249b45fbf-scripts\") pod \"glance-default-external-api-0\" (UID: \"8e3d72d9-073c-46b3-826c-36b249b45fbf\") " pod="openstack/glance-default-external-api-0" Dec 03 09:12:34 crc kubenswrapper[4576]: I1203 09:12:34.241109 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8e3d72d9-073c-46b3-826c-36b249b45fbf-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"8e3d72d9-073c-46b3-826c-36b249b45fbf\") " pod="openstack/glance-default-external-api-0" Dec 03 09:12:34 crc kubenswrapper[4576]: I1203 09:12:34.241227 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"8e3d72d9-073c-46b3-826c-36b249b45fbf\") " pod="openstack/glance-default-external-api-0" Dec 03 09:12:34 crc kubenswrapper[4576]: I1203 09:12:34.241265 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8e3d72d9-073c-46b3-826c-36b249b45fbf-logs\") pod \"glance-default-external-api-0\" (UID: \"8e3d72d9-073c-46b3-826c-36b249b45fbf\") " pod="openstack/glance-default-external-api-0" Dec 03 09:12:34 crc kubenswrapper[4576]: I1203 09:12:34.241299 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jfddc\" (UniqueName: \"kubernetes.io/projected/8e3d72d9-073c-46b3-826c-36b249b45fbf-kube-api-access-jfddc\") pod \"glance-default-external-api-0\" (UID: \"8e3d72d9-073c-46b3-826c-36b249b45fbf\") " pod="openstack/glance-default-external-api-0" Dec 03 09:12:34 crc kubenswrapper[4576]: I1203 09:12:34.241343 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e3d72d9-073c-46b3-826c-36b249b45fbf-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"8e3d72d9-073c-46b3-826c-36b249b45fbf\") " pod="openstack/glance-default-external-api-0" 
Dec 03 09:12:34 crc kubenswrapper[4576]: I1203 09:12:34.241376 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e3d72d9-073c-46b3-826c-36b249b45fbf-config-data\") pod \"glance-default-external-api-0\" (UID: \"8e3d72d9-073c-46b3-826c-36b249b45fbf\") " pod="openstack/glance-default-external-api-0" Dec 03 09:12:34 crc kubenswrapper[4576]: I1203 09:12:34.241848 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8e3d72d9-073c-46b3-826c-36b249b45fbf-logs\") pod \"glance-default-external-api-0\" (UID: \"8e3d72d9-073c-46b3-826c-36b249b45fbf\") " pod="openstack/glance-default-external-api-0" Dec 03 09:12:34 crc kubenswrapper[4576]: I1203 09:12:34.242010 4576 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"8e3d72d9-073c-46b3-826c-36b249b45fbf\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-external-api-0" Dec 03 09:12:34 crc kubenswrapper[4576]: I1203 09:12:34.242165 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8e3d72d9-073c-46b3-826c-36b249b45fbf-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"8e3d72d9-073c-46b3-826c-36b249b45fbf\") " pod="openstack/glance-default-external-api-0" Dec 03 09:12:34 crc kubenswrapper[4576]: I1203 09:12:34.247445 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e3d72d9-073c-46b3-826c-36b249b45fbf-config-data\") pod \"glance-default-external-api-0\" (UID: \"8e3d72d9-073c-46b3-826c-36b249b45fbf\") " pod="openstack/glance-default-external-api-0" Dec 03 09:12:34 crc kubenswrapper[4576]: I1203 09:12:34.247595 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e3d72d9-073c-46b3-826c-36b249b45fbf-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"8e3d72d9-073c-46b3-826c-36b249b45fbf\") " pod="openstack/glance-default-external-api-0" Dec 03 09:12:34 crc kubenswrapper[4576]: I1203 09:12:34.250381 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8e3d72d9-073c-46b3-826c-36b249b45fbf-scripts\") pod \"glance-default-external-api-0\" (UID: \"8e3d72d9-073c-46b3-826c-36b249b45fbf\") " pod="openstack/glance-default-external-api-0" Dec 03 09:12:34 crc kubenswrapper[4576]: I1203 09:12:34.258586 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8e3d72d9-073c-46b3-826c-36b249b45fbf-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"8e3d72d9-073c-46b3-826c-36b249b45fbf\") " pod="openstack/glance-default-external-api-0" Dec 03 09:12:34 crc kubenswrapper[4576]: I1203 09:12:34.262385 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jfddc\" (UniqueName: \"kubernetes.io/projected/8e3d72d9-073c-46b3-826c-36b249b45fbf-kube-api-access-jfddc\") pod \"glance-default-external-api-0\" (UID: \"8e3d72d9-073c-46b3-826c-36b249b45fbf\") " pod="openstack/glance-default-external-api-0" Dec 03 09:12:34 crc kubenswrapper[4576]: I1203 09:12:34.276718 4576 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"8e3d72d9-073c-46b3-826c-36b249b45fbf\") " pod="openstack/glance-default-external-api-0" Dec 03 09:12:34 crc kubenswrapper[4576]: I1203 09:12:34.317465 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 09:12:35 crc kubenswrapper[4576]: I1203 09:12:35.220830 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 09:12:35 crc kubenswrapper[4576]: I1203 09:12:35.366724 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d3c310e-5b57-4fe1-a940-df0f6faedfc1-logs\") pod \"2d3c310e-5b57-4fe1-a940-df0f6faedfc1\" (UID: \"2d3c310e-5b57-4fe1-a940-df0f6faedfc1\") " Dec 03 09:12:35 crc kubenswrapper[4576]: I1203 09:12:35.367146 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"2d3c310e-5b57-4fe1-a940-df0f6faedfc1\" (UID: \"2d3c310e-5b57-4fe1-a940-df0f6faedfc1\") " Dec 03 09:12:35 crc kubenswrapper[4576]: I1203 09:12:35.367173 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d3c310e-5b57-4fe1-a940-df0f6faedfc1-scripts\") pod \"2d3c310e-5b57-4fe1-a940-df0f6faedfc1\" (UID: \"2d3c310e-5b57-4fe1-a940-df0f6faedfc1\") " Dec 03 09:12:35 crc kubenswrapper[4576]: I1203 09:12:35.367243 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2d3c310e-5b57-4fe1-a940-df0f6faedfc1-httpd-run\") pod \"2d3c310e-5b57-4fe1-a940-df0f6faedfc1\" (UID: \"2d3c310e-5b57-4fe1-a940-df0f6faedfc1\") " Dec 03 09:12:35 crc kubenswrapper[4576]: I1203 09:12:35.367242 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2d3c310e-5b57-4fe1-a940-df0f6faedfc1-logs" (OuterVolumeSpecName: "logs") pod "2d3c310e-5b57-4fe1-a940-df0f6faedfc1" (UID: "2d3c310e-5b57-4fe1-a940-df0f6faedfc1"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:12:35 crc kubenswrapper[4576]: I1203 09:12:35.367297 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d3c310e-5b57-4fe1-a940-df0f6faedfc1-combined-ca-bundle\") pod \"2d3c310e-5b57-4fe1-a940-df0f6faedfc1\" (UID: \"2d3c310e-5b57-4fe1-a940-df0f6faedfc1\") " Dec 03 09:12:35 crc kubenswrapper[4576]: I1203 09:12:35.367338 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-99vhf\" (UniqueName: \"kubernetes.io/projected/2d3c310e-5b57-4fe1-a940-df0f6faedfc1-kube-api-access-99vhf\") pod \"2d3c310e-5b57-4fe1-a940-df0f6faedfc1\" (UID: \"2d3c310e-5b57-4fe1-a940-df0f6faedfc1\") " Dec 03 09:12:35 crc kubenswrapper[4576]: I1203 09:12:35.367379 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d3c310e-5b57-4fe1-a940-df0f6faedfc1-config-data\") pod \"2d3c310e-5b57-4fe1-a940-df0f6faedfc1\" (UID: \"2d3c310e-5b57-4fe1-a940-df0f6faedfc1\") " Dec 03 09:12:35 crc kubenswrapper[4576]: I1203 09:12:35.367761 4576 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d3c310e-5b57-4fe1-a940-df0f6faedfc1-logs\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:35 crc kubenswrapper[4576]: I1203 09:12:35.367989 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2d3c310e-5b57-4fe1-a940-df0f6faedfc1-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "2d3c310e-5b57-4fe1-a940-df0f6faedfc1" (UID: "2d3c310e-5b57-4fe1-a940-df0f6faedfc1"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:12:35 crc kubenswrapper[4576]: I1203 09:12:35.374246 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d3c310e-5b57-4fe1-a940-df0f6faedfc1-scripts" (OuterVolumeSpecName: "scripts") pod "2d3c310e-5b57-4fe1-a940-df0f6faedfc1" (UID: "2d3c310e-5b57-4fe1-a940-df0f6faedfc1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:12:35 crc kubenswrapper[4576]: I1203 09:12:35.383263 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d3c310e-5b57-4fe1-a940-df0f6faedfc1-kube-api-access-99vhf" (OuterVolumeSpecName: "kube-api-access-99vhf") pod "2d3c310e-5b57-4fe1-a940-df0f6faedfc1" (UID: "2d3c310e-5b57-4fe1-a940-df0f6faedfc1"). InnerVolumeSpecName "kube-api-access-99vhf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:12:35 crc kubenswrapper[4576]: I1203 09:12:35.403143 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "glance") pod "2d3c310e-5b57-4fe1-a940-df0f6faedfc1" (UID: "2d3c310e-5b57-4fe1-a940-df0f6faedfc1"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 03 09:12:35 crc kubenswrapper[4576]: I1203 09:12:35.427014 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d3c310e-5b57-4fe1-a940-df0f6faedfc1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2d3c310e-5b57-4fe1-a940-df0f6faedfc1" (UID: "2d3c310e-5b57-4fe1-a940-df0f6faedfc1"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:12:35 crc kubenswrapper[4576]: I1203 09:12:35.440145 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 09:12:35 crc kubenswrapper[4576]: I1203 09:12:35.470453 4576 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2d3c310e-5b57-4fe1-a940-df0f6faedfc1-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:35 crc kubenswrapper[4576]: I1203 09:12:35.470501 4576 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d3c310e-5b57-4fe1-a940-df0f6faedfc1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:35 crc kubenswrapper[4576]: I1203 09:12:35.470516 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-99vhf\" (UniqueName: \"kubernetes.io/projected/2d3c310e-5b57-4fe1-a940-df0f6faedfc1-kube-api-access-99vhf\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:35 crc kubenswrapper[4576]: I1203 09:12:35.470584 4576 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Dec 03 09:12:35 crc kubenswrapper[4576]: I1203 09:12:35.470601 4576 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d3c310e-5b57-4fe1-a940-df0f6faedfc1-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:35 crc kubenswrapper[4576]: I1203 09:12:35.488581 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d3c310e-5b57-4fe1-a940-df0f6faedfc1-config-data" (OuterVolumeSpecName: "config-data") pod "2d3c310e-5b57-4fe1-a940-df0f6faedfc1" (UID: "2d3c310e-5b57-4fe1-a940-df0f6faedfc1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:12:35 crc kubenswrapper[4576]: I1203 09:12:35.508672 4576 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Dec 03 09:12:35 crc kubenswrapper[4576]: I1203 09:12:35.574231 4576 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d3c310e-5b57-4fe1-a940-df0f6faedfc1-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:35 crc kubenswrapper[4576]: I1203 09:12:35.574301 4576 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:35 crc kubenswrapper[4576]: I1203 09:12:35.699349 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c99775a3-39b9-42bf-8193-1fc88285efd5" path="/var/lib/kubelet/pods/c99775a3-39b9-42bf-8193-1fc88285efd5/volumes" Dec 03 09:12:35 crc kubenswrapper[4576]: I1203 09:12:35.937675 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8e3d72d9-073c-46b3-826c-36b249b45fbf","Type":"ContainerStarted","Data":"2244e1007be836cf9e5eb42da9134fa430ef1a18e765c97ec6049bc613bbdf04"} Dec 03 09:12:35 crc kubenswrapper[4576]: I1203 09:12:35.958439 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"2d3c310e-5b57-4fe1-a940-df0f6faedfc1","Type":"ContainerDied","Data":"20439cbb4437596047ae1d1d54b225685aed94c6f506472fc29abbddb516cd5f"} Dec 03 09:12:35 crc kubenswrapper[4576]: I1203 09:12:35.958512 4576 scope.go:117] "RemoveContainer" containerID="6fbc96c6803055a142fc667535c37b51bd3a2b589856a093a689373ee84417b7" Dec 03 09:12:35 crc kubenswrapper[4576]: I1203 09:12:35.958512 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 09:12:35 crc kubenswrapper[4576]: I1203 09:12:35.990108 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 09:12:35 crc kubenswrapper[4576]: I1203 09:12:35.996502 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 09:12:36 crc kubenswrapper[4576]: I1203 09:12:36.017751 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 09:12:36 crc kubenswrapper[4576]: E1203 09:12:36.018134 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d3c310e-5b57-4fe1-a940-df0f6faedfc1" containerName="glance-httpd" Dec 03 09:12:36 crc kubenswrapper[4576]: I1203 09:12:36.018157 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d3c310e-5b57-4fe1-a940-df0f6faedfc1" containerName="glance-httpd" Dec 03 09:12:36 crc kubenswrapper[4576]: E1203 09:12:36.018181 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d3c310e-5b57-4fe1-a940-df0f6faedfc1" containerName="glance-log" Dec 03 09:12:36 crc kubenswrapper[4576]: I1203 09:12:36.018191 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d3c310e-5b57-4fe1-a940-df0f6faedfc1" containerName="glance-log" Dec 03 09:12:36 crc kubenswrapper[4576]: I1203 09:12:36.018402 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d3c310e-5b57-4fe1-a940-df0f6faedfc1" containerName="glance-httpd" Dec 03 09:12:36 crc kubenswrapper[4576]: I1203 09:12:36.018423 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d3c310e-5b57-4fe1-a940-df0f6faedfc1" containerName="glance-log" Dec 03 09:12:36 crc kubenswrapper[4576]: I1203 09:12:36.019336 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 09:12:36 crc kubenswrapper[4576]: I1203 09:12:36.022856 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 03 09:12:36 crc kubenswrapper[4576]: I1203 09:12:36.023065 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 03 09:12:36 crc kubenswrapper[4576]: I1203 09:12:36.042853 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 09:12:36 crc kubenswrapper[4576]: I1203 09:12:36.201142 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06a5ab71-58c3-4345-b8ac-00d09a1205d6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"06a5ab71-58c3-4345-b8ac-00d09a1205d6\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:12:36 crc kubenswrapper[4576]: I1203 09:12:36.201211 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/06a5ab71-58c3-4345-b8ac-00d09a1205d6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"06a5ab71-58c3-4345-b8ac-00d09a1205d6\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:12:36 crc kubenswrapper[4576]: I1203 09:12:36.201251 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/06a5ab71-58c3-4345-b8ac-00d09a1205d6-logs\") pod \"glance-default-internal-api-0\" (UID: \"06a5ab71-58c3-4345-b8ac-00d09a1205d6\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:12:36 crc kubenswrapper[4576]: I1203 09:12:36.201313 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"06a5ab71-58c3-4345-b8ac-00d09a1205d6\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:12:36 crc kubenswrapper[4576]: I1203 09:12:36.201365 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/06a5ab71-58c3-4345-b8ac-00d09a1205d6-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"06a5ab71-58c3-4345-b8ac-00d09a1205d6\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:12:36 crc kubenswrapper[4576]: I1203 09:12:36.201391 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5cbqw\" (UniqueName: \"kubernetes.io/projected/06a5ab71-58c3-4345-b8ac-00d09a1205d6-kube-api-access-5cbqw\") pod \"glance-default-internal-api-0\" (UID: \"06a5ab71-58c3-4345-b8ac-00d09a1205d6\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:12:36 crc kubenswrapper[4576]: I1203 09:12:36.201441 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06a5ab71-58c3-4345-b8ac-00d09a1205d6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"06a5ab71-58c3-4345-b8ac-00d09a1205d6\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:12:36 crc kubenswrapper[4576]: I1203 09:12:36.201480 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06a5ab71-58c3-4345-b8ac-00d09a1205d6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"06a5ab71-58c3-4345-b8ac-00d09a1205d6\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:12:36 crc kubenswrapper[4576]: I1203 09:12:36.303178 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5cbqw\" (UniqueName: \"kubernetes.io/projected/06a5ab71-58c3-4345-b8ac-00d09a1205d6-kube-api-access-5cbqw\") pod \"glance-default-internal-api-0\" (UID: \"06a5ab71-58c3-4345-b8ac-00d09a1205d6\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:12:36 crc kubenswrapper[4576]: I1203 09:12:36.303271 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06a5ab71-58c3-4345-b8ac-00d09a1205d6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"06a5ab71-58c3-4345-b8ac-00d09a1205d6\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:12:36 crc kubenswrapper[4576]: I1203 09:12:36.303333 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06a5ab71-58c3-4345-b8ac-00d09a1205d6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"06a5ab71-58c3-4345-b8ac-00d09a1205d6\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:12:36 crc kubenswrapper[4576]: I1203 09:12:36.303381 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06a5ab71-58c3-4345-b8ac-00d09a1205d6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"06a5ab71-58c3-4345-b8ac-00d09a1205d6\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:12:36 crc kubenswrapper[4576]: I1203 09:12:36.303410 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/06a5ab71-58c3-4345-b8ac-00d09a1205d6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"06a5ab71-58c3-4345-b8ac-00d09a1205d6\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:12:36 crc kubenswrapper[4576]: I1203 09:12:36.303444 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/06a5ab71-58c3-4345-b8ac-00d09a1205d6-logs\") pod \"glance-default-internal-api-0\" (UID: \"06a5ab71-58c3-4345-b8ac-00d09a1205d6\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:12:36 crc kubenswrapper[4576]: I1203 09:12:36.303584 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"06a5ab71-58c3-4345-b8ac-00d09a1205d6\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:12:36 crc kubenswrapper[4576]: I1203 09:12:36.304136 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/06a5ab71-58c3-4345-b8ac-00d09a1205d6-logs\") pod \"glance-default-internal-api-0\" (UID: \"06a5ab71-58c3-4345-b8ac-00d09a1205d6\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:12:36 crc kubenswrapper[4576]: I1203 09:12:36.304196 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/06a5ab71-58c3-4345-b8ac-00d09a1205d6-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"06a5ab71-58c3-4345-b8ac-00d09a1205d6\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:12:36 crc kubenswrapper[4576]: I1203 09:12:36.304209 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/06a5ab71-58c3-4345-b8ac-00d09a1205d6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"06a5ab71-58c3-4345-b8ac-00d09a1205d6\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:12:36 crc kubenswrapper[4576]: I1203 09:12:36.304496 4576 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"06a5ab71-58c3-4345-b8ac-00d09a1205d6\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-internal-api-0" Dec 03 09:12:36 crc kubenswrapper[4576]: I1203 09:12:36.310788 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06a5ab71-58c3-4345-b8ac-00d09a1205d6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"06a5ab71-58c3-4345-b8ac-00d09a1205d6\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:12:36 crc kubenswrapper[4576]: I1203 09:12:36.310831 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06a5ab71-58c3-4345-b8ac-00d09a1205d6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"06a5ab71-58c3-4345-b8ac-00d09a1205d6\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:12:36 crc kubenswrapper[4576]: I1203 09:12:36.317689 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06a5ab71-58c3-4345-b8ac-00d09a1205d6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"06a5ab71-58c3-4345-b8ac-00d09a1205d6\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:12:36 crc kubenswrapper[4576]: I1203 09:12:36.340926 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/06a5ab71-58c3-4345-b8ac-00d09a1205d6-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"06a5ab71-58c3-4345-b8ac-00d09a1205d6\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:12:36 crc kubenswrapper[4576]: I1203 09:12:36.343805 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5cbqw\" (UniqueName: \"kubernetes.io/projected/06a5ab71-58c3-4345-b8ac-00d09a1205d6-kube-api-access-5cbqw\") pod \"glance-default-internal-api-0\" (UID: \"06a5ab71-58c3-4345-b8ac-00d09a1205d6\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:12:36 crc kubenswrapper[4576]: I1203 09:12:36.418163 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"06a5ab71-58c3-4345-b8ac-00d09a1205d6\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:12:36 crc kubenswrapper[4576]: I1203 09:12:36.717184 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 09:12:36 crc kubenswrapper[4576]: I1203 09:12:36.975342 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8e3d72d9-073c-46b3-826c-36b249b45fbf","Type":"ContainerStarted","Data":"0bd15b9bed9b0a2b59d6288cceb3854684ceb4ab5e4491cc7ffa3e50db208c8d"} Dec 03 09:12:37 crc kubenswrapper[4576]: I1203 09:12:37.688311 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d3c310e-5b57-4fe1-a940-df0f6faedfc1" path="/var/lib/kubelet/pods/2d3c310e-5b57-4fe1-a940-df0f6faedfc1/volumes" Dec 03 09:12:40 crc kubenswrapper[4576]: I1203 09:12:40.901898 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-56df8fb6b7-dl696" Dec 03 09:12:41 crc kubenswrapper[4576]: I1203 09:12:40.995659 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cf78879c9-kvbcw"] Dec 03 09:12:41 crc kubenswrapper[4576]: I1203 09:12:40.995933 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-cf78879c9-kvbcw" podUID="0af2ec10-0960-41c2-bbb9-aeffd38aa679" containerName="dnsmasq-dns" containerID="cri-o://ec00d3b0d8f59cfb96ee204c863c49f8643d79ffdf4edeb79161f50f54360d0e" gracePeriod=10 Dec 03 09:12:41 crc kubenswrapper[4576]: I1203 09:12:41.745184 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-5d9b9454d4-cbqlk" Dec 03 09:12:41 crc kubenswrapper[4576]: I1203 09:12:41.745246 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-5d9b9454d4-cbqlk" Dec 03 09:12:41 crc kubenswrapper[4576]: I1203 09:12:41.746901 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-5d9b9454d4-cbqlk" podUID="83ab6db2-7b9e-4161-a064-56fe67986825" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.144:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.144:8443: connect: connection refused" Dec 03 09:12:41 crc kubenswrapper[4576]: I1203 09:12:41.908856 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-6d649695d8-6rtxn" Dec 03 09:12:41 crc kubenswrapper[4576]: I1203 09:12:41.908922 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-6d649695d8-6rtxn" Dec 03 09:12:41 crc kubenswrapper[4576]: I1203 09:12:41.910492 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-6d649695d8-6rtxn" podUID="288ed488-5270-4966-b866-f9f015262989" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Dec 03 09:12:42 crc kubenswrapper[4576]: I1203 09:12:42.044124 4576 generic.go:334] "Generic (PLEG): container finished" podID="bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07" containerID="d1d90449a4995f5ce23629369d06b9e4e6817aa4c307f6795c657b448f4718da" exitCode=0 Dec 03 09:12:42 crc kubenswrapper[4576]: I1203 09:12:42.044213 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-w66xd" event={"ID":"bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07","Type":"ContainerDied","Data":"d1d90449a4995f5ce23629369d06b9e4e6817aa4c307f6795c657b448f4718da"} Dec 03 09:12:42 crc kubenswrapper[4576]: I1203 09:12:42.049379 4576 generic.go:334] "Generic (PLEG): container finished" 
podID="0af2ec10-0960-41c2-bbb9-aeffd38aa679" containerID="ec00d3b0d8f59cfb96ee204c863c49f8643d79ffdf4edeb79161f50f54360d0e" exitCode=0 Dec 03 09:12:42 crc kubenswrapper[4576]: I1203 09:12:42.049432 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cf78879c9-kvbcw" event={"ID":"0af2ec10-0960-41c2-bbb9-aeffd38aa679","Type":"ContainerDied","Data":"ec00d3b0d8f59cfb96ee204c863c49f8643d79ffdf4edeb79161f50f54360d0e"} Dec 03 09:12:42 crc kubenswrapper[4576]: I1203 09:12:42.728072 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-cf78879c9-kvbcw" podUID="0af2ec10-0960-41c2-bbb9-aeffd38aa679" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.141:5353: connect: connection refused" Dec 03 09:12:43 crc kubenswrapper[4576]: I1203 09:12:43.072831 4576 generic.go:334] "Generic (PLEG): container finished" podID="cdef871a-e0bf-42eb-b9d5-bcf0777fbec4" containerID="1c9aba1e8d982524aae075138b9deb785c987072ab1406b17dab3ab5d8338280" exitCode=0 Dec 03 09:12:43 crc kubenswrapper[4576]: I1203 09:12:43.073038 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-2njh4" event={"ID":"cdef871a-e0bf-42eb-b9d5-bcf0777fbec4","Type":"ContainerDied","Data":"1c9aba1e8d982524aae075138b9deb785c987072ab1406b17dab3ab5d8338280"} Dec 03 09:12:43 crc kubenswrapper[4576]: I1203 09:12:43.835153 4576 scope.go:117] "RemoveContainer" containerID="7d0b7cd27e003f421aed9f9bd052f518c8725957e7d74fc0d06f8eec991dd0ff" Dec 03 09:12:43 crc kubenswrapper[4576]: I1203 09:12:43.926163 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-w66xd" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.087538 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m8s5j\" (UniqueName: \"kubernetes.io/projected/bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07-kube-api-access-m8s5j\") pod \"bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07\" (UID: \"bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07\") " Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.087597 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07-credential-keys\") pod \"bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07\" (UID: \"bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07\") " Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.087755 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07-combined-ca-bundle\") pod \"bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07\" (UID: \"bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07\") " Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.087792 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07-fernet-keys\") pod \"bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07\" (UID: \"bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07\") " Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.087841 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07-config-data\") pod \"bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07\" (UID: \"bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07\") " Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.087855 4576 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07-scripts\") pod \"bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07\" (UID: \"bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07\") " Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.111456 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07-kube-api-access-m8s5j" (OuterVolumeSpecName: "kube-api-access-m8s5j") pod "bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07" (UID: "bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07"). InnerVolumeSpecName "kube-api-access-m8s5j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.122723 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07" (UID: "bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.137462 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07-scripts" (OuterVolumeSpecName: "scripts") pod "bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07" (UID: "bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.147615 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07" (UID: "bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.155295 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-w66xd" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.155914 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-w66xd" event={"ID":"bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07","Type":"ContainerDied","Data":"8d81040bc44e1d98fb0fff5092b87f2a3e891af8ad873a8d8eebede777c59493"} Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.156131 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8d81040bc44e1d98fb0fff5092b87f2a3e891af8ad873a8d8eebede777c59493" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.162294 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07" (UID: "bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.169186 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cf78879c9-kvbcw" event={"ID":"0af2ec10-0960-41c2-bbb9-aeffd38aa679","Type":"ContainerDied","Data":"60bb78361af744a107b206fb4692c9fa342ab1aad577633fd2b8064f6f608bff"} Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.169227 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="60bb78361af744a107b206fb4692c9fa342ab1aad577633fd2b8064f6f608bff" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.169917 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cf78879c9-kvbcw" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.202775 4576 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.203012 4576 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.203022 4576 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.203030 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m8s5j\" (UniqueName: \"kubernetes.io/projected/bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07-kube-api-access-m8s5j\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.203040 4576 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.218752 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07-config-data" (OuterVolumeSpecName: "config-data") pod "bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07" (UID: "bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.283938 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-69657bfb7-ncr7l"] Dec 03 09:12:44 crc kubenswrapper[4576]: E1203 09:12:44.284371 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07" containerName="keystone-bootstrap" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.284384 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07" containerName="keystone-bootstrap" Dec 03 09:12:44 crc kubenswrapper[4576]: E1203 09:12:44.284401 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0af2ec10-0960-41c2-bbb9-aeffd38aa679" containerName="init" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.284407 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="0af2ec10-0960-41c2-bbb9-aeffd38aa679" containerName="init" Dec 03 09:12:44 crc kubenswrapper[4576]: E1203 09:12:44.284422 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0af2ec10-0960-41c2-bbb9-aeffd38aa679" containerName="dnsmasq-dns" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.284428 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="0af2ec10-0960-41c2-bbb9-aeffd38aa679" containerName="dnsmasq-dns" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.284614 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="0af2ec10-0960-41c2-bbb9-aeffd38aa679" containerName="dnsmasq-dns" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.284629 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07" containerName="keystone-bootstrap" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.285209 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-69657bfb7-ncr7l" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.297040 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.297232 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.298595 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-69657bfb7-ncr7l"] Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.304485 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0af2ec10-0960-41c2-bbb9-aeffd38aa679-dns-svc\") pod \"0af2ec10-0960-41c2-bbb9-aeffd38aa679\" (UID: \"0af2ec10-0960-41c2-bbb9-aeffd38aa679\") " Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.304646 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5k6g6\" (UniqueName: \"kubernetes.io/projected/0af2ec10-0960-41c2-bbb9-aeffd38aa679-kube-api-access-5k6g6\") pod \"0af2ec10-0960-41c2-bbb9-aeffd38aa679\" (UID: \"0af2ec10-0960-41c2-bbb9-aeffd38aa679\") " Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.304667 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0af2ec10-0960-41c2-bbb9-aeffd38aa679-dns-swift-storage-0\") pod \"0af2ec10-0960-41c2-bbb9-aeffd38aa679\" (UID: \"0af2ec10-0960-41c2-bbb9-aeffd38aa679\") " Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.304691 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0af2ec10-0960-41c2-bbb9-aeffd38aa679-config\") pod \"0af2ec10-0960-41c2-bbb9-aeffd38aa679\" (UID: \"0af2ec10-0960-41c2-bbb9-aeffd38aa679\") " Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.304746 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0af2ec10-0960-41c2-bbb9-aeffd38aa679-ovsdbserver-sb\") pod \"0af2ec10-0960-41c2-bbb9-aeffd38aa679\" (UID: \"0af2ec10-0960-41c2-bbb9-aeffd38aa679\") " Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.304787 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0af2ec10-0960-41c2-bbb9-aeffd38aa679-ovsdbserver-nb\") pod \"0af2ec10-0960-41c2-bbb9-aeffd38aa679\" (UID: \"0af2ec10-0960-41c2-bbb9-aeffd38aa679\") " Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.305975 4576 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.334184 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0af2ec10-0960-41c2-bbb9-aeffd38aa679-kube-api-access-5k6g6" (OuterVolumeSpecName: "kube-api-access-5k6g6") pod "0af2ec10-0960-41c2-bbb9-aeffd38aa679" (UID: "0af2ec10-0960-41c2-bbb9-aeffd38aa679"). InnerVolumeSpecName "kube-api-access-5k6g6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.407584 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ffc7abfc-aecf-42de-8947-143cd7bda142-public-tls-certs\") pod \"keystone-69657bfb7-ncr7l\" (UID: \"ffc7abfc-aecf-42de-8947-143cd7bda142\") " pod="openstack/keystone-69657bfb7-ncr7l" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.407624 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ffc7abfc-aecf-42de-8947-143cd7bda142-fernet-keys\") pod \"keystone-69657bfb7-ncr7l\" (UID: \"ffc7abfc-aecf-42de-8947-143cd7bda142\") " pod="openstack/keystone-69657bfb7-ncr7l" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.407665 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffc7abfc-aecf-42de-8947-143cd7bda142-config-data\") pod \"keystone-69657bfb7-ncr7l\" (UID: \"ffc7abfc-aecf-42de-8947-143cd7bda142\") " pod="openstack/keystone-69657bfb7-ncr7l" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.407727 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffc7abfc-aecf-42de-8947-143cd7bda142-combined-ca-bundle\") pod \"keystone-69657bfb7-ncr7l\" (UID: \"ffc7abfc-aecf-42de-8947-143cd7bda142\") " pod="openstack/keystone-69657bfb7-ncr7l" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.407744 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ffc7abfc-aecf-42de-8947-143cd7bda142-internal-tls-certs\") pod \"keystone-69657bfb7-ncr7l\" (UID: \"ffc7abfc-aecf-42de-8947-143cd7bda142\") " pod="openstack/keystone-69657bfb7-ncr7l" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.407759 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-frkf7\" (UniqueName: \"kubernetes.io/projected/ffc7abfc-aecf-42de-8947-143cd7bda142-kube-api-access-frkf7\") pod \"keystone-69657bfb7-ncr7l\" (UID: \"ffc7abfc-aecf-42de-8947-143cd7bda142\") " pod="openstack/keystone-69657bfb7-ncr7l" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.407788 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ffc7abfc-aecf-42de-8947-143cd7bda142-credential-keys\") pod \"keystone-69657bfb7-ncr7l\" (UID: \"ffc7abfc-aecf-42de-8947-143cd7bda142\") " pod="openstack/keystone-69657bfb7-ncr7l" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.407853 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ffc7abfc-aecf-42de-8947-143cd7bda142-scripts\") pod \"keystone-69657bfb7-ncr7l\" (UID: \"ffc7abfc-aecf-42de-8947-143cd7bda142\") " pod="openstack/keystone-69657bfb7-ncr7l" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.408247 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5k6g6\" (UniqueName: \"kubernetes.io/projected/0af2ec10-0960-41c2-bbb9-aeffd38aa679-kube-api-access-5k6g6\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:44 crc 
kubenswrapper[4576]: I1203 09:12:44.428715 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0af2ec10-0960-41c2-bbb9-aeffd38aa679-config" (OuterVolumeSpecName: "config") pod "0af2ec10-0960-41c2-bbb9-aeffd38aa679" (UID: "0af2ec10-0960-41c2-bbb9-aeffd38aa679"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.513690 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffc7abfc-aecf-42de-8947-143cd7bda142-config-data\") pod \"keystone-69657bfb7-ncr7l\" (UID: \"ffc7abfc-aecf-42de-8947-143cd7bda142\") " pod="openstack/keystone-69657bfb7-ncr7l" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.513769 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffc7abfc-aecf-42de-8947-143cd7bda142-combined-ca-bundle\") pod \"keystone-69657bfb7-ncr7l\" (UID: \"ffc7abfc-aecf-42de-8947-143cd7bda142\") " pod="openstack/keystone-69657bfb7-ncr7l" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.513787 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ffc7abfc-aecf-42de-8947-143cd7bda142-internal-tls-certs\") pod \"keystone-69657bfb7-ncr7l\" (UID: \"ffc7abfc-aecf-42de-8947-143cd7bda142\") " pod="openstack/keystone-69657bfb7-ncr7l" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.513804 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-frkf7\" (UniqueName: \"kubernetes.io/projected/ffc7abfc-aecf-42de-8947-143cd7bda142-kube-api-access-frkf7\") pod \"keystone-69657bfb7-ncr7l\" (UID: \"ffc7abfc-aecf-42de-8947-143cd7bda142\") " pod="openstack/keystone-69657bfb7-ncr7l" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.513832 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ffc7abfc-aecf-42de-8947-143cd7bda142-credential-keys\") pod \"keystone-69657bfb7-ncr7l\" (UID: \"ffc7abfc-aecf-42de-8947-143cd7bda142\") " pod="openstack/keystone-69657bfb7-ncr7l" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.513875 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ffc7abfc-aecf-42de-8947-143cd7bda142-scripts\") pod \"keystone-69657bfb7-ncr7l\" (UID: \"ffc7abfc-aecf-42de-8947-143cd7bda142\") " pod="openstack/keystone-69657bfb7-ncr7l" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.513928 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ffc7abfc-aecf-42de-8947-143cd7bda142-public-tls-certs\") pod \"keystone-69657bfb7-ncr7l\" (UID: \"ffc7abfc-aecf-42de-8947-143cd7bda142\") " pod="openstack/keystone-69657bfb7-ncr7l" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.513946 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ffc7abfc-aecf-42de-8947-143cd7bda142-fernet-keys\") pod \"keystone-69657bfb7-ncr7l\" (UID: \"ffc7abfc-aecf-42de-8947-143cd7bda142\") " pod="openstack/keystone-69657bfb7-ncr7l" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.513994 4576 reconciler_common.go:293] "Volume detached for 
volume \"config\" (UniqueName: \"kubernetes.io/configmap/0af2ec10-0960-41c2-bbb9-aeffd38aa679-config\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.524307 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffc7abfc-aecf-42de-8947-143cd7bda142-combined-ca-bundle\") pod \"keystone-69657bfb7-ncr7l\" (UID: \"ffc7abfc-aecf-42de-8947-143cd7bda142\") " pod="openstack/keystone-69657bfb7-ncr7l" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.526134 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ffc7abfc-aecf-42de-8947-143cd7bda142-internal-tls-certs\") pod \"keystone-69657bfb7-ncr7l\" (UID: \"ffc7abfc-aecf-42de-8947-143cd7bda142\") " pod="openstack/keystone-69657bfb7-ncr7l" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.527827 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffc7abfc-aecf-42de-8947-143cd7bda142-config-data\") pod \"keystone-69657bfb7-ncr7l\" (UID: \"ffc7abfc-aecf-42de-8947-143cd7bda142\") " pod="openstack/keystone-69657bfb7-ncr7l" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.757096 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ffc7abfc-aecf-42de-8947-143cd7bda142-public-tls-certs\") pod \"keystone-69657bfb7-ncr7l\" (UID: \"ffc7abfc-aecf-42de-8947-143cd7bda142\") " pod="openstack/keystone-69657bfb7-ncr7l" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.757343 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ffc7abfc-aecf-42de-8947-143cd7bda142-scripts\") pod \"keystone-69657bfb7-ncr7l\" (UID: \"ffc7abfc-aecf-42de-8947-143cd7bda142\") " pod="openstack/keystone-69657bfb7-ncr7l" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.758996 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ffc7abfc-aecf-42de-8947-143cd7bda142-fernet-keys\") pod \"keystone-69657bfb7-ncr7l\" (UID: \"ffc7abfc-aecf-42de-8947-143cd7bda142\") " pod="openstack/keystone-69657bfb7-ncr7l" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.759279 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-frkf7\" (UniqueName: \"kubernetes.io/projected/ffc7abfc-aecf-42de-8947-143cd7bda142-kube-api-access-frkf7\") pod \"keystone-69657bfb7-ncr7l\" (UID: \"ffc7abfc-aecf-42de-8947-143cd7bda142\") " pod="openstack/keystone-69657bfb7-ncr7l" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.759613 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ffc7abfc-aecf-42de-8947-143cd7bda142-credential-keys\") pod \"keystone-69657bfb7-ncr7l\" (UID: \"ffc7abfc-aecf-42de-8947-143cd7bda142\") " pod="openstack/keystone-69657bfb7-ncr7l" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.831487 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0af2ec10-0960-41c2-bbb9-aeffd38aa679-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "0af2ec10-0960-41c2-bbb9-aeffd38aa679" (UID: "0af2ec10-0960-41c2-bbb9-aeffd38aa679"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.889568 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0af2ec10-0960-41c2-bbb9-aeffd38aa679-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "0af2ec10-0960-41c2-bbb9-aeffd38aa679" (UID: "0af2ec10-0960-41c2-bbb9-aeffd38aa679"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.893889 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0af2ec10-0960-41c2-bbb9-aeffd38aa679-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "0af2ec10-0960-41c2-bbb9-aeffd38aa679" (UID: "0af2ec10-0960-41c2-bbb9-aeffd38aa679"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.899838 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0af2ec10-0960-41c2-bbb9-aeffd38aa679-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0af2ec10-0960-41c2-bbb9-aeffd38aa679" (UID: "0af2ec10-0960-41c2-bbb9-aeffd38aa679"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.925504 4576 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0af2ec10-0960-41c2-bbb9-aeffd38aa679-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.925548 4576 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0af2ec10-0960-41c2-bbb9-aeffd38aa679-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.925558 4576 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0af2ec10-0960-41c2-bbb9-aeffd38aa679-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.925566 4576 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0af2ec10-0960-41c2-bbb9-aeffd38aa679-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:44 crc kubenswrapper[4576]: I1203 09:12:44.926870 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.006702 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-69657bfb7-ncr7l" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.049850 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-2njh4" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.136452 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q2phz\" (UniqueName: \"kubernetes.io/projected/cdef871a-e0bf-42eb-b9d5-bcf0777fbec4-kube-api-access-q2phz\") pod \"cdef871a-e0bf-42eb-b9d5-bcf0777fbec4\" (UID: \"cdef871a-e0bf-42eb-b9d5-bcf0777fbec4\") " Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.136604 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cdef871a-e0bf-42eb-b9d5-bcf0777fbec4-scripts\") pod \"cdef871a-e0bf-42eb-b9d5-bcf0777fbec4\" (UID: \"cdef871a-e0bf-42eb-b9d5-bcf0777fbec4\") " Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.136686 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdef871a-e0bf-42eb-b9d5-bcf0777fbec4-combined-ca-bundle\") pod \"cdef871a-e0bf-42eb-b9d5-bcf0777fbec4\" (UID: \"cdef871a-e0bf-42eb-b9d5-bcf0777fbec4\") " Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.136743 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cdef871a-e0bf-42eb-b9d5-bcf0777fbec4-logs\") pod \"cdef871a-e0bf-42eb-b9d5-bcf0777fbec4\" (UID: \"cdef871a-e0bf-42eb-b9d5-bcf0777fbec4\") " Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.136769 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cdef871a-e0bf-42eb-b9d5-bcf0777fbec4-config-data\") pod \"cdef871a-e0bf-42eb-b9d5-bcf0777fbec4\" (UID: \"cdef871a-e0bf-42eb-b9d5-bcf0777fbec4\") " Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.141800 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cdef871a-e0bf-42eb-b9d5-bcf0777fbec4-logs" (OuterVolumeSpecName: "logs") pod "cdef871a-e0bf-42eb-b9d5-bcf0777fbec4" (UID: "cdef871a-e0bf-42eb-b9d5-bcf0777fbec4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.170793 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cdef871a-e0bf-42eb-b9d5-bcf0777fbec4-scripts" (OuterVolumeSpecName: "scripts") pod "cdef871a-e0bf-42eb-b9d5-bcf0777fbec4" (UID: "cdef871a-e0bf-42eb-b9d5-bcf0777fbec4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.172920 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cdef871a-e0bf-42eb-b9d5-bcf0777fbec4-kube-api-access-q2phz" (OuterVolumeSpecName: "kube-api-access-q2phz") pod "cdef871a-e0bf-42eb-b9d5-bcf0777fbec4" (UID: "cdef871a-e0bf-42eb-b9d5-bcf0777fbec4"). InnerVolumeSpecName "kube-api-access-q2phz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.212181 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cdef871a-e0bf-42eb-b9d5-bcf0777fbec4-config-data" (OuterVolumeSpecName: "config-data") pod "cdef871a-e0bf-42eb-b9d5-bcf0777fbec4" (UID: "cdef871a-e0bf-42eb-b9d5-bcf0777fbec4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.238767 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q2phz\" (UniqueName: \"kubernetes.io/projected/cdef871a-e0bf-42eb-b9d5-bcf0777fbec4-kube-api-access-q2phz\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.238805 4576 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cdef871a-e0bf-42eb-b9d5-bcf0777fbec4-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.238817 4576 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cdef871a-e0bf-42eb-b9d5-bcf0777fbec4-logs\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.238827 4576 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cdef871a-e0bf-42eb-b9d5-bcf0777fbec4-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.239806 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-988np" event={"ID":"437286d3-1147-43ef-945f-8612d1610427","Type":"ContainerStarted","Data":"3da15af9b8e6b4bca051ee591606a6e45ab32d9dcae3bf0a80c6ecd25215d4ed"} Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.258047 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"06a5ab71-58c3-4345-b8ac-00d09a1205d6","Type":"ContainerStarted","Data":"00ce442ce26529ab960c7feb1c8b9e595718e6f3b9d9f9e4d6aca2db3a56bf56"} Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.264447 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-5f666f544-xj7sz"] Dec 03 09:12:45 crc kubenswrapper[4576]: E1203 09:12:45.264873 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cdef871a-e0bf-42eb-b9d5-bcf0777fbec4" containerName="placement-db-sync" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.264888 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="cdef871a-e0bf-42eb-b9d5-bcf0777fbec4" containerName="placement-db-sync" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.265068 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="cdef871a-e0bf-42eb-b9d5-bcf0777fbec4" containerName="placement-db-sync" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.265973 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5f666f544-xj7sz" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.281624 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-5f666f544-xj7sz"] Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.282716 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cdef871a-e0bf-42eb-b9d5-bcf0777fbec4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cdef871a-e0bf-42eb-b9d5-bcf0777fbec4" (UID: "cdef871a-e0bf-42eb-b9d5-bcf0777fbec4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.283204 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.283408 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.287341 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-988np" podStartSLOduration=3.799889855 podStartE2EDuration="1m14.287320474s" podCreationTimestamp="2025-12-03 09:11:31 +0000 UTC" firstStartedPulling="2025-12-03 09:11:33.595193427 +0000 UTC m=+1900.981170411" lastFinishedPulling="2025-12-03 09:12:44.082624046 +0000 UTC m=+1971.468601030" observedRunningTime="2025-12-03 09:12:45.283616393 +0000 UTC m=+1972.669593377" watchObservedRunningTime="2025-12-03 09:12:45.287320474 +0000 UTC m=+1972.673297488" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.341999 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-2njh4" event={"ID":"cdef871a-e0bf-42eb-b9d5-bcf0777fbec4","Type":"ContainerDied","Data":"0a7b7045f4efb8b09ff0a1e38631882bc81491a4e943cb69e7fdbb760bed1cad"} Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.342047 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0a7b7045f4efb8b09ff0a1e38631882bc81491a4e943cb69e7fdbb760bed1cad" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.342109 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-2njh4" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.348876 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a1aaa45e-8e67-4360-b4db-2d2866d00865-public-tls-certs\") pod \"placement-5f666f544-xj7sz\" (UID: \"a1aaa45e-8e67-4360-b4db-2d2866d00865\") " pod="openstack/placement-5f666f544-xj7sz" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.348928 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1aaa45e-8e67-4360-b4db-2d2866d00865-combined-ca-bundle\") pod \"placement-5f666f544-xj7sz\" (UID: \"a1aaa45e-8e67-4360-b4db-2d2866d00865\") " pod="openstack/placement-5f666f544-xj7sz" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.349022 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a1aaa45e-8e67-4360-b4db-2d2866d00865-internal-tls-certs\") pod \"placement-5f666f544-xj7sz\" (UID: \"a1aaa45e-8e67-4360-b4db-2d2866d00865\") " pod="openstack/placement-5f666f544-xj7sz" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.349265 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1aaa45e-8e67-4360-b4db-2d2866d00865-config-data\") pod \"placement-5f666f544-xj7sz\" (UID: \"a1aaa45e-8e67-4360-b4db-2d2866d00865\") " pod="openstack/placement-5f666f544-xj7sz" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.349369 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jc2sx\" (UniqueName: 
\"kubernetes.io/projected/a1aaa45e-8e67-4360-b4db-2d2866d00865-kube-api-access-jc2sx\") pod \"placement-5f666f544-xj7sz\" (UID: \"a1aaa45e-8e67-4360-b4db-2d2866d00865\") " pod="openstack/placement-5f666f544-xj7sz" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.349452 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a1aaa45e-8e67-4360-b4db-2d2866d00865-scripts\") pod \"placement-5f666f544-xj7sz\" (UID: \"a1aaa45e-8e67-4360-b4db-2d2866d00865\") " pod="openstack/placement-5f666f544-xj7sz" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.349475 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1aaa45e-8e67-4360-b4db-2d2866d00865-logs\") pod \"placement-5f666f544-xj7sz\" (UID: \"a1aaa45e-8e67-4360-b4db-2d2866d00865\") " pod="openstack/placement-5f666f544-xj7sz" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.349766 4576 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdef871a-e0bf-42eb-b9d5-bcf0777fbec4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.414181 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cf78879c9-kvbcw" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.416415 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b5fe3155-d737-4a7e-9596-a6080cae4b27","Type":"ContainerStarted","Data":"1902fc43fcd456aa6d39a99c52b62c6e954722493df5faeacabc5d4d0f9e6e1b"} Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.456503 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1aaa45e-8e67-4360-b4db-2d2866d00865-config-data\") pod \"placement-5f666f544-xj7sz\" (UID: \"a1aaa45e-8e67-4360-b4db-2d2866d00865\") " pod="openstack/placement-5f666f544-xj7sz" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.456571 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jc2sx\" (UniqueName: \"kubernetes.io/projected/a1aaa45e-8e67-4360-b4db-2d2866d00865-kube-api-access-jc2sx\") pod \"placement-5f666f544-xj7sz\" (UID: \"a1aaa45e-8e67-4360-b4db-2d2866d00865\") " pod="openstack/placement-5f666f544-xj7sz" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.456610 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a1aaa45e-8e67-4360-b4db-2d2866d00865-scripts\") pod \"placement-5f666f544-xj7sz\" (UID: \"a1aaa45e-8e67-4360-b4db-2d2866d00865\") " pod="openstack/placement-5f666f544-xj7sz" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.456629 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1aaa45e-8e67-4360-b4db-2d2866d00865-logs\") pod \"placement-5f666f544-xj7sz\" (UID: \"a1aaa45e-8e67-4360-b4db-2d2866d00865\") " pod="openstack/placement-5f666f544-xj7sz" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.456692 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a1aaa45e-8e67-4360-b4db-2d2866d00865-public-tls-certs\") pod \"placement-5f666f544-xj7sz\" (UID: 
\"a1aaa45e-8e67-4360-b4db-2d2866d00865\") " pod="openstack/placement-5f666f544-xj7sz" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.456714 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1aaa45e-8e67-4360-b4db-2d2866d00865-combined-ca-bundle\") pod \"placement-5f666f544-xj7sz\" (UID: \"a1aaa45e-8e67-4360-b4db-2d2866d00865\") " pod="openstack/placement-5f666f544-xj7sz" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.456764 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a1aaa45e-8e67-4360-b4db-2d2866d00865-internal-tls-certs\") pod \"placement-5f666f544-xj7sz\" (UID: \"a1aaa45e-8e67-4360-b4db-2d2866d00865\") " pod="openstack/placement-5f666f544-xj7sz" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.465730 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a1aaa45e-8e67-4360-b4db-2d2866d00865-internal-tls-certs\") pod \"placement-5f666f544-xj7sz\" (UID: \"a1aaa45e-8e67-4360-b4db-2d2866d00865\") " pod="openstack/placement-5f666f544-xj7sz" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.466028 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1aaa45e-8e67-4360-b4db-2d2866d00865-logs\") pod \"placement-5f666f544-xj7sz\" (UID: \"a1aaa45e-8e67-4360-b4db-2d2866d00865\") " pod="openstack/placement-5f666f544-xj7sz" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.470883 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1aaa45e-8e67-4360-b4db-2d2866d00865-combined-ca-bundle\") pod \"placement-5f666f544-xj7sz\" (UID: \"a1aaa45e-8e67-4360-b4db-2d2866d00865\") " pod="openstack/placement-5f666f544-xj7sz" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.482749 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1aaa45e-8e67-4360-b4db-2d2866d00865-config-data\") pod \"placement-5f666f544-xj7sz\" (UID: \"a1aaa45e-8e67-4360-b4db-2d2866d00865\") " pod="openstack/placement-5f666f544-xj7sz" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.483827 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a1aaa45e-8e67-4360-b4db-2d2866d00865-public-tls-certs\") pod \"placement-5f666f544-xj7sz\" (UID: \"a1aaa45e-8e67-4360-b4db-2d2866d00865\") " pod="openstack/placement-5f666f544-xj7sz" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.505931 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a1aaa45e-8e67-4360-b4db-2d2866d00865-scripts\") pod \"placement-5f666f544-xj7sz\" (UID: \"a1aaa45e-8e67-4360-b4db-2d2866d00865\") " pod="openstack/placement-5f666f544-xj7sz" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.524837 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jc2sx\" (UniqueName: \"kubernetes.io/projected/a1aaa45e-8e67-4360-b4db-2d2866d00865-kube-api-access-jc2sx\") pod \"placement-5f666f544-xj7sz\" (UID: \"a1aaa45e-8e67-4360-b4db-2d2866d00865\") " pod="openstack/placement-5f666f544-xj7sz" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.595157 4576 kubelet.go:2437] 
"SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cf78879c9-kvbcw"] Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.602324 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-cf78879c9-kvbcw"] Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.689755 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5f666f544-xj7sz" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.705897 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0af2ec10-0960-41c2-bbb9-aeffd38aa679" path="/var/lib/kubelet/pods/0af2ec10-0960-41c2-bbb9-aeffd38aa679/volumes" Dec 03 09:12:45 crc kubenswrapper[4576]: I1203 09:12:45.853309 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-69657bfb7-ncr7l"] Dec 03 09:12:45 crc kubenswrapper[4576]: W1203 09:12:45.871874 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podffc7abfc_aecf_42de_8947_143cd7bda142.slice/crio-75806ce65969ed0f7a7e041426e4b90661679bcb1552dc5c771d49b783dc619a WatchSource:0}: Error finding container 75806ce65969ed0f7a7e041426e4b90661679bcb1552dc5c771d49b783dc619a: Status 404 returned error can't find the container with id 75806ce65969ed0f7a7e041426e4b90661679bcb1552dc5c771d49b783dc619a Dec 03 09:12:46 crc kubenswrapper[4576]: I1203 09:12:46.115733 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-5f666f544-xj7sz"] Dec 03 09:12:46 crc kubenswrapper[4576]: W1203 09:12:46.136868 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda1aaa45e_8e67_4360_b4db_2d2866d00865.slice/crio-6e58ca758270dfa01ea28e4e624d473c5316b9cfe5d1757439996db8a6143326 WatchSource:0}: Error finding container 6e58ca758270dfa01ea28e4e624d473c5316b9cfe5d1757439996db8a6143326: Status 404 returned error can't find the container with id 6e58ca758270dfa01ea28e4e624d473c5316b9cfe5d1757439996db8a6143326 Dec 03 09:12:46 crc kubenswrapper[4576]: I1203 09:12:46.423598 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-69657bfb7-ncr7l" event={"ID":"ffc7abfc-aecf-42de-8947-143cd7bda142","Type":"ContainerStarted","Data":"75806ce65969ed0f7a7e041426e4b90661679bcb1552dc5c771d49b783dc619a"} Dec 03 09:12:46 crc kubenswrapper[4576]: I1203 09:12:46.425240 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5f666f544-xj7sz" event={"ID":"a1aaa45e-8e67-4360-b4db-2d2866d00865","Type":"ContainerStarted","Data":"6e58ca758270dfa01ea28e4e624d473c5316b9cfe5d1757439996db8a6143326"} Dec 03 09:12:47 crc kubenswrapper[4576]: I1203 09:12:47.465308 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"06a5ab71-58c3-4345-b8ac-00d09a1205d6","Type":"ContainerStarted","Data":"993d7d5ae0d126496d669c638716a16f232631f0a0b7e46c5aed880460e30082"} Dec 03 09:12:47 crc kubenswrapper[4576]: I1203 09:12:47.476589 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-69657bfb7-ncr7l" event={"ID":"ffc7abfc-aecf-42de-8947-143cd7bda142","Type":"ContainerStarted","Data":"1d9a49afad66bbf667248b4437104010a8df727f73b2521eb58c66e61ce92577"} Dec 03 09:12:47 crc kubenswrapper[4576]: I1203 09:12:47.488489 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" 
event={"ID":"8e3d72d9-073c-46b3-826c-36b249b45fbf","Type":"ContainerStarted","Data":"c45cfa9a734db0f97e84842cff06e72809eceede8e001086b188dc77fd9d63b5"} Dec 03 09:12:47 crc kubenswrapper[4576]: I1203 09:12:47.493498 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5f666f544-xj7sz" event={"ID":"a1aaa45e-8e67-4360-b4db-2d2866d00865","Type":"ContainerStarted","Data":"c406f5c2c7976cb7c571a35e0b74b8d476bfffcc84bf08356fce8b14dde41a82"} Dec 03 09:12:47 crc kubenswrapper[4576]: I1203 09:12:47.499933 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-qqg9g" event={"ID":"047a7c95-c4e3-46fa-8b1b-2a351992493e","Type":"ContainerStarted","Data":"46008041e9328f5c70ea5d61480b89615e431713d233332f8517865e11ec2e38"} Dec 03 09:12:47 crc kubenswrapper[4576]: I1203 09:12:47.592014 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=14.591989384 podStartE2EDuration="14.591989384s" podCreationTimestamp="2025-12-03 09:12:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:12:47.536082348 +0000 UTC m=+1974.922059332" watchObservedRunningTime="2025-12-03 09:12:47.591989384 +0000 UTC m=+1974.977966368" Dec 03 09:12:47 crc kubenswrapper[4576]: I1203 09:12:47.618566 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-qqg9g" podStartSLOduration=5.368654671 podStartE2EDuration="1m16.618520018s" podCreationTimestamp="2025-12-03 09:11:31 +0000 UTC" firstStartedPulling="2025-12-03 09:11:33.396827663 +0000 UTC m=+1900.782804647" lastFinishedPulling="2025-12-03 09:12:44.64669301 +0000 UTC m=+1972.032669994" observedRunningTime="2025-12-03 09:12:47.604860205 +0000 UTC m=+1974.990837199" watchObservedRunningTime="2025-12-03 09:12:47.618520018 +0000 UTC m=+1975.004497002" Dec 03 09:12:48 crc kubenswrapper[4576]: I1203 09:12:48.532276 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5f666f544-xj7sz" event={"ID":"a1aaa45e-8e67-4360-b4db-2d2866d00865","Type":"ContainerStarted","Data":"c61d32636c5bc9597cf3e5094d363fc0b768a263b88ddc39a037671cf1e389a7"} Dec 03 09:12:48 crc kubenswrapper[4576]: I1203 09:12:48.533163 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-5f666f544-xj7sz" Dec 03 09:12:48 crc kubenswrapper[4576]: I1203 09:12:48.533216 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-5f666f544-xj7sz" Dec 03 09:12:48 crc kubenswrapper[4576]: I1203 09:12:48.560909 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-5f666f544-xj7sz" podStartSLOduration=3.560887927 podStartE2EDuration="3.560887927s" podCreationTimestamp="2025-12-03 09:12:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:12:48.553816174 +0000 UTC m=+1975.939793168" watchObservedRunningTime="2025-12-03 09:12:48.560887927 +0000 UTC m=+1975.946864901" Dec 03 09:12:48 crc kubenswrapper[4576]: I1203 09:12:48.561670 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-69657bfb7-ncr7l" Dec 03 09:12:48 crc kubenswrapper[4576]: I1203 09:12:48.598094 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-69657bfb7-ncr7l" 
podStartSLOduration=4.598068562 podStartE2EDuration="4.598068562s" podCreationTimestamp="2025-12-03 09:12:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:12:48.588249954 +0000 UTC m=+1975.974226938" watchObservedRunningTime="2025-12-03 09:12:48.598068562 +0000 UTC m=+1975.984045546" Dec 03 09:12:49 crc kubenswrapper[4576]: I1203 09:12:49.571953 4576 generic.go:334] "Generic (PLEG): container finished" podID="a1b7c2fb-e839-4698-8319-3f8eae6e46d6" containerID="763ab8a21ac3fe408c84d9db83465d682f64fd4ed25ba98019c21823b4135d4a" exitCode=0 Dec 03 09:12:49 crc kubenswrapper[4576]: I1203 09:12:49.572024 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-qwbz9" event={"ID":"a1b7c2fb-e839-4698-8319-3f8eae6e46d6","Type":"ContainerDied","Data":"763ab8a21ac3fe408c84d9db83465d682f64fd4ed25ba98019c21823b4135d4a"} Dec 03 09:12:49 crc kubenswrapper[4576]: I1203 09:12:49.577665 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"06a5ab71-58c3-4345-b8ac-00d09a1205d6","Type":"ContainerStarted","Data":"ec6fb260ace858ff3252caf06df438bd629d9b661dc002528047350d813cb3f2"} Dec 03 09:12:49 crc kubenswrapper[4576]: I1203 09:12:49.634356 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=14.634331674 podStartE2EDuration="14.634331674s" podCreationTimestamp="2025-12-03 09:12:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:12:49.62394375 +0000 UTC m=+1977.009920754" watchObservedRunningTime="2025-12-03 09:12:49.634331674 +0000 UTC m=+1977.020308658" Dec 03 09:12:50 crc kubenswrapper[4576]: I1203 09:12:50.587490 4576 generic.go:334] "Generic (PLEG): container finished" podID="437286d3-1147-43ef-945f-8612d1610427" containerID="3da15af9b8e6b4bca051ee591606a6e45ab32d9dcae3bf0a80c6ecd25215d4ed" exitCode=0 Dec 03 09:12:50 crc kubenswrapper[4576]: I1203 09:12:50.587579 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-988np" event={"ID":"437286d3-1147-43ef-945f-8612d1610427","Type":"ContainerDied","Data":"3da15af9b8e6b4bca051ee591606a6e45ab32d9dcae3bf0a80c6ecd25215d4ed"} Dec 03 09:12:50 crc kubenswrapper[4576]: I1203 09:12:50.906421 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-qwbz9" Dec 03 09:12:51 crc kubenswrapper[4576]: I1203 09:12:51.012488 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a1b7c2fb-e839-4698-8319-3f8eae6e46d6-config\") pod \"a1b7c2fb-e839-4698-8319-3f8eae6e46d6\" (UID: \"a1b7c2fb-e839-4698-8319-3f8eae6e46d6\") " Dec 03 09:12:51 crc kubenswrapper[4576]: I1203 09:12:51.012808 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zsjgx\" (UniqueName: \"kubernetes.io/projected/a1b7c2fb-e839-4698-8319-3f8eae6e46d6-kube-api-access-zsjgx\") pod \"a1b7c2fb-e839-4698-8319-3f8eae6e46d6\" (UID: \"a1b7c2fb-e839-4698-8319-3f8eae6e46d6\") " Dec 03 09:12:51 crc kubenswrapper[4576]: I1203 09:12:51.013029 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1b7c2fb-e839-4698-8319-3f8eae6e46d6-combined-ca-bundle\") pod \"a1b7c2fb-e839-4698-8319-3f8eae6e46d6\" (UID: \"a1b7c2fb-e839-4698-8319-3f8eae6e46d6\") " Dec 03 09:12:51 crc kubenswrapper[4576]: I1203 09:12:51.033834 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1b7c2fb-e839-4698-8319-3f8eae6e46d6-kube-api-access-zsjgx" (OuterVolumeSpecName: "kube-api-access-zsjgx") pod "a1b7c2fb-e839-4698-8319-3f8eae6e46d6" (UID: "a1b7c2fb-e839-4698-8319-3f8eae6e46d6"). InnerVolumeSpecName "kube-api-access-zsjgx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:12:51 crc kubenswrapper[4576]: I1203 09:12:51.052781 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1b7c2fb-e839-4698-8319-3f8eae6e46d6-config" (OuterVolumeSpecName: "config") pod "a1b7c2fb-e839-4698-8319-3f8eae6e46d6" (UID: "a1b7c2fb-e839-4698-8319-3f8eae6e46d6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:12:51 crc kubenswrapper[4576]: I1203 09:12:51.078753 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1b7c2fb-e839-4698-8319-3f8eae6e46d6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a1b7c2fb-e839-4698-8319-3f8eae6e46d6" (UID: "a1b7c2fb-e839-4698-8319-3f8eae6e46d6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:12:51 crc kubenswrapper[4576]: I1203 09:12:51.117085 4576 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1b7c2fb-e839-4698-8319-3f8eae6e46d6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:51 crc kubenswrapper[4576]: I1203 09:12:51.117146 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zsjgx\" (UniqueName: \"kubernetes.io/projected/a1b7c2fb-e839-4698-8319-3f8eae6e46d6-kube-api-access-zsjgx\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:51 crc kubenswrapper[4576]: I1203 09:12:51.117162 4576 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/a1b7c2fb-e839-4698-8319-3f8eae6e46d6-config\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:51 crc kubenswrapper[4576]: I1203 09:12:51.600838 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-qwbz9" Dec 03 09:12:51 crc kubenswrapper[4576]: I1203 09:12:51.601680 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-qwbz9" event={"ID":"a1b7c2fb-e839-4698-8319-3f8eae6e46d6","Type":"ContainerDied","Data":"0aa870d89d046595377172bc442499c143e5905c5a22bee93c1634f6bc3e5cc1"} Dec 03 09:12:51 crc kubenswrapper[4576]: I1203 09:12:51.601739 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0aa870d89d046595377172bc442499c143e5905c5a22bee93c1634f6bc3e5cc1" Dec 03 09:12:51 crc kubenswrapper[4576]: I1203 09:12:51.748147 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-5d9b9454d4-cbqlk" podUID="83ab6db2-7b9e-4161-a064-56fe67986825" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.144:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.144:8443: connect: connection refused" Dec 03 09:12:51 crc kubenswrapper[4576]: I1203 09:12:51.892289 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-knzgm"] Dec 03 09:12:51 crc kubenswrapper[4576]: E1203 09:12:51.892705 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1b7c2fb-e839-4698-8319-3f8eae6e46d6" containerName="neutron-db-sync" Dec 03 09:12:51 crc kubenswrapper[4576]: I1203 09:12:51.892717 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1b7c2fb-e839-4698-8319-3f8eae6e46d6" containerName="neutron-db-sync" Dec 03 09:12:51 crc kubenswrapper[4576]: I1203 09:12:51.892904 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1b7c2fb-e839-4698-8319-3f8eae6e46d6" containerName="neutron-db-sync" Dec 03 09:12:51 crc kubenswrapper[4576]: I1203 09:12:51.893764 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b7b667979-knzgm" Dec 03 09:12:51 crc kubenswrapper[4576]: I1203 09:12:51.914580 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-6d649695d8-6rtxn" podUID="288ed488-5270-4966-b866-f9f015262989" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Dec 03 09:12:51 crc kubenswrapper[4576]: I1203 09:12:51.916585 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-667d896bdd-mtmgs"] Dec 03 09:12:51 crc kubenswrapper[4576]: I1203 09:12:51.917982 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-667d896bdd-mtmgs" Dec 03 09:12:51 crc kubenswrapper[4576]: I1203 09:12:51.933826 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2daadf8d-95a3-4485-9f3e-105c6600c366-dns-swift-storage-0\") pod \"dnsmasq-dns-6b7b667979-knzgm\" (UID: \"2daadf8d-95a3-4485-9f3e-105c6600c366\") " pod="openstack/dnsmasq-dns-6b7b667979-knzgm" Dec 03 09:12:51 crc kubenswrapper[4576]: I1203 09:12:51.933872 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2daadf8d-95a3-4485-9f3e-105c6600c366-ovsdbserver-sb\") pod \"dnsmasq-dns-6b7b667979-knzgm\" (UID: \"2daadf8d-95a3-4485-9f3e-105c6600c366\") " pod="openstack/dnsmasq-dns-6b7b667979-knzgm" Dec 03 09:12:51 crc kubenswrapper[4576]: I1203 09:12:51.933900 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2daadf8d-95a3-4485-9f3e-105c6600c366-config\") pod \"dnsmasq-dns-6b7b667979-knzgm\" (UID: \"2daadf8d-95a3-4485-9f3e-105c6600c366\") " pod="openstack/dnsmasq-dns-6b7b667979-knzgm" Dec 03 09:12:51 crc kubenswrapper[4576]: I1203 09:12:51.933919 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2daadf8d-95a3-4485-9f3e-105c6600c366-ovsdbserver-nb\") pod \"dnsmasq-dns-6b7b667979-knzgm\" (UID: \"2daadf8d-95a3-4485-9f3e-105c6600c366\") " pod="openstack/dnsmasq-dns-6b7b667979-knzgm" Dec 03 09:12:51 crc kubenswrapper[4576]: I1203 09:12:51.933949 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zkcj2\" (UniqueName: \"kubernetes.io/projected/2daadf8d-95a3-4485-9f3e-105c6600c366-kube-api-access-zkcj2\") pod \"dnsmasq-dns-6b7b667979-knzgm\" (UID: \"2daadf8d-95a3-4485-9f3e-105c6600c366\") " pod="openstack/dnsmasq-dns-6b7b667979-knzgm" Dec 03 09:12:51 crc kubenswrapper[4576]: I1203 09:12:51.933966 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2daadf8d-95a3-4485-9f3e-105c6600c366-dns-svc\") pod \"dnsmasq-dns-6b7b667979-knzgm\" (UID: \"2daadf8d-95a3-4485-9f3e-105c6600c366\") " pod="openstack/dnsmasq-dns-6b7b667979-knzgm" Dec 03 09:12:51 crc kubenswrapper[4576]: I1203 09:12:51.934306 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-lk4g6" Dec 03 09:12:51 crc kubenswrapper[4576]: I1203 09:12:51.940911 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 03 09:12:51 crc kubenswrapper[4576]: I1203 09:12:51.941173 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Dec 03 09:12:51 crc kubenswrapper[4576]: I1203 09:12:51.944192 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 03 09:12:51 crc kubenswrapper[4576]: I1203 09:12:51.947051 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-knzgm"] Dec 03 09:12:51 crc kubenswrapper[4576]: I1203 09:12:51.960713 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-667d896bdd-mtmgs"] Dec 03 09:12:52 crc kubenswrapper[4576]: I1203 
09:12:52.036821 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2e5e255c-5481-4fc1-937f-53014b9b1da3-httpd-config\") pod \"neutron-667d896bdd-mtmgs\" (UID: \"2e5e255c-5481-4fc1-937f-53014b9b1da3\") " pod="openstack/neutron-667d896bdd-mtmgs" Dec 03 09:12:52 crc kubenswrapper[4576]: I1203 09:12:52.036894 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zkcj2\" (UniqueName: \"kubernetes.io/projected/2daadf8d-95a3-4485-9f3e-105c6600c366-kube-api-access-zkcj2\") pod \"dnsmasq-dns-6b7b667979-knzgm\" (UID: \"2daadf8d-95a3-4485-9f3e-105c6600c366\") " pod="openstack/dnsmasq-dns-6b7b667979-knzgm" Dec 03 09:12:52 crc kubenswrapper[4576]: I1203 09:12:52.036917 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2daadf8d-95a3-4485-9f3e-105c6600c366-dns-svc\") pod \"dnsmasq-dns-6b7b667979-knzgm\" (UID: \"2daadf8d-95a3-4485-9f3e-105c6600c366\") " pod="openstack/dnsmasq-dns-6b7b667979-knzgm" Dec 03 09:12:52 crc kubenswrapper[4576]: I1203 09:12:52.043771 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2daadf8d-95a3-4485-9f3e-105c6600c366-dns-svc\") pod \"dnsmasq-dns-6b7b667979-knzgm\" (UID: \"2daadf8d-95a3-4485-9f3e-105c6600c366\") " pod="openstack/dnsmasq-dns-6b7b667979-knzgm" Dec 03 09:12:52 crc kubenswrapper[4576]: I1203 09:12:52.044037 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2e5e255c-5481-4fc1-937f-53014b9b1da3-ovndb-tls-certs\") pod \"neutron-667d896bdd-mtmgs\" (UID: \"2e5e255c-5481-4fc1-937f-53014b9b1da3\") " pod="openstack/neutron-667d896bdd-mtmgs" Dec 03 09:12:52 crc kubenswrapper[4576]: I1203 09:12:52.044349 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2daadf8d-95a3-4485-9f3e-105c6600c366-dns-swift-storage-0\") pod \"dnsmasq-dns-6b7b667979-knzgm\" (UID: \"2daadf8d-95a3-4485-9f3e-105c6600c366\") " pod="openstack/dnsmasq-dns-6b7b667979-knzgm" Dec 03 09:12:52 crc kubenswrapper[4576]: I1203 09:12:52.044556 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2daadf8d-95a3-4485-9f3e-105c6600c366-ovsdbserver-sb\") pod \"dnsmasq-dns-6b7b667979-knzgm\" (UID: \"2daadf8d-95a3-4485-9f3e-105c6600c366\") " pod="openstack/dnsmasq-dns-6b7b667979-knzgm" Dec 03 09:12:52 crc kubenswrapper[4576]: I1203 09:12:52.044606 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rm6gc\" (UniqueName: \"kubernetes.io/projected/2e5e255c-5481-4fc1-937f-53014b9b1da3-kube-api-access-rm6gc\") pod \"neutron-667d896bdd-mtmgs\" (UID: \"2e5e255c-5481-4fc1-937f-53014b9b1da3\") " pod="openstack/neutron-667d896bdd-mtmgs" Dec 03 09:12:52 crc kubenswrapper[4576]: I1203 09:12:52.044686 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e5e255c-5481-4fc1-937f-53014b9b1da3-combined-ca-bundle\") pod \"neutron-667d896bdd-mtmgs\" (UID: \"2e5e255c-5481-4fc1-937f-53014b9b1da3\") " pod="openstack/neutron-667d896bdd-mtmgs" Dec 03 09:12:52 crc kubenswrapper[4576]: I1203 
09:12:52.044763 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2daadf8d-95a3-4485-9f3e-105c6600c366-config\") pod \"dnsmasq-dns-6b7b667979-knzgm\" (UID: \"2daadf8d-95a3-4485-9f3e-105c6600c366\") " pod="openstack/dnsmasq-dns-6b7b667979-knzgm" Dec 03 09:12:52 crc kubenswrapper[4576]: I1203 09:12:52.044810 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2daadf8d-95a3-4485-9f3e-105c6600c366-ovsdbserver-nb\") pod \"dnsmasq-dns-6b7b667979-knzgm\" (UID: \"2daadf8d-95a3-4485-9f3e-105c6600c366\") " pod="openstack/dnsmasq-dns-6b7b667979-knzgm" Dec 03 09:12:52 crc kubenswrapper[4576]: I1203 09:12:52.044828 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2e5e255c-5481-4fc1-937f-53014b9b1da3-config\") pod \"neutron-667d896bdd-mtmgs\" (UID: \"2e5e255c-5481-4fc1-937f-53014b9b1da3\") " pod="openstack/neutron-667d896bdd-mtmgs" Dec 03 09:12:52 crc kubenswrapper[4576]: I1203 09:12:52.045220 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2daadf8d-95a3-4485-9f3e-105c6600c366-ovsdbserver-sb\") pod \"dnsmasq-dns-6b7b667979-knzgm\" (UID: \"2daadf8d-95a3-4485-9f3e-105c6600c366\") " pod="openstack/dnsmasq-dns-6b7b667979-knzgm" Dec 03 09:12:52 crc kubenswrapper[4576]: I1203 09:12:52.045878 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2daadf8d-95a3-4485-9f3e-105c6600c366-ovsdbserver-nb\") pod \"dnsmasq-dns-6b7b667979-knzgm\" (UID: \"2daadf8d-95a3-4485-9f3e-105c6600c366\") " pod="openstack/dnsmasq-dns-6b7b667979-knzgm" Dec 03 09:12:52 crc kubenswrapper[4576]: I1203 09:12:52.046075 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2daadf8d-95a3-4485-9f3e-105c6600c366-config\") pod \"dnsmasq-dns-6b7b667979-knzgm\" (UID: \"2daadf8d-95a3-4485-9f3e-105c6600c366\") " pod="openstack/dnsmasq-dns-6b7b667979-knzgm" Dec 03 09:12:52 crc kubenswrapper[4576]: I1203 09:12:52.046293 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2daadf8d-95a3-4485-9f3e-105c6600c366-dns-swift-storage-0\") pod \"dnsmasq-dns-6b7b667979-knzgm\" (UID: \"2daadf8d-95a3-4485-9f3e-105c6600c366\") " pod="openstack/dnsmasq-dns-6b7b667979-knzgm" Dec 03 09:12:52 crc kubenswrapper[4576]: I1203 09:12:52.065589 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zkcj2\" (UniqueName: \"kubernetes.io/projected/2daadf8d-95a3-4485-9f3e-105c6600c366-kube-api-access-zkcj2\") pod \"dnsmasq-dns-6b7b667979-knzgm\" (UID: \"2daadf8d-95a3-4485-9f3e-105c6600c366\") " pod="openstack/dnsmasq-dns-6b7b667979-knzgm" Dec 03 09:12:52 crc kubenswrapper[4576]: I1203 09:12:52.147520 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rm6gc\" (UniqueName: \"kubernetes.io/projected/2e5e255c-5481-4fc1-937f-53014b9b1da3-kube-api-access-rm6gc\") pod \"neutron-667d896bdd-mtmgs\" (UID: \"2e5e255c-5481-4fc1-937f-53014b9b1da3\") " pod="openstack/neutron-667d896bdd-mtmgs" Dec 03 09:12:52 crc kubenswrapper[4576]: I1203 09:12:52.147590 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e5e255c-5481-4fc1-937f-53014b9b1da3-combined-ca-bundle\") pod \"neutron-667d896bdd-mtmgs\" (UID: \"2e5e255c-5481-4fc1-937f-53014b9b1da3\") " pod="openstack/neutron-667d896bdd-mtmgs" Dec 03 09:12:52 crc kubenswrapper[4576]: I1203 09:12:52.147609 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2e5e255c-5481-4fc1-937f-53014b9b1da3-config\") pod \"neutron-667d896bdd-mtmgs\" (UID: \"2e5e255c-5481-4fc1-937f-53014b9b1da3\") " pod="openstack/neutron-667d896bdd-mtmgs" Dec 03 09:12:52 crc kubenswrapper[4576]: I1203 09:12:52.147640 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2e5e255c-5481-4fc1-937f-53014b9b1da3-httpd-config\") pod \"neutron-667d896bdd-mtmgs\" (UID: \"2e5e255c-5481-4fc1-937f-53014b9b1da3\") " pod="openstack/neutron-667d896bdd-mtmgs" Dec 03 09:12:52 crc kubenswrapper[4576]: I1203 09:12:52.147717 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2e5e255c-5481-4fc1-937f-53014b9b1da3-ovndb-tls-certs\") pod \"neutron-667d896bdd-mtmgs\" (UID: \"2e5e255c-5481-4fc1-937f-53014b9b1da3\") " pod="openstack/neutron-667d896bdd-mtmgs" Dec 03 09:12:52 crc kubenswrapper[4576]: I1203 09:12:52.159687 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2e5e255c-5481-4fc1-937f-53014b9b1da3-ovndb-tls-certs\") pod \"neutron-667d896bdd-mtmgs\" (UID: \"2e5e255c-5481-4fc1-937f-53014b9b1da3\") " pod="openstack/neutron-667d896bdd-mtmgs" Dec 03 09:12:52 crc kubenswrapper[4576]: I1203 09:12:52.160146 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/2e5e255c-5481-4fc1-937f-53014b9b1da3-config\") pod \"neutron-667d896bdd-mtmgs\" (UID: \"2e5e255c-5481-4fc1-937f-53014b9b1da3\") " pod="openstack/neutron-667d896bdd-mtmgs" Dec 03 09:12:52 crc kubenswrapper[4576]: I1203 09:12:52.160652 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2e5e255c-5481-4fc1-937f-53014b9b1da3-httpd-config\") pod \"neutron-667d896bdd-mtmgs\" (UID: \"2e5e255c-5481-4fc1-937f-53014b9b1da3\") " pod="openstack/neutron-667d896bdd-mtmgs" Dec 03 09:12:52 crc kubenswrapper[4576]: I1203 09:12:52.169243 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e5e255c-5481-4fc1-937f-53014b9b1da3-combined-ca-bundle\") pod \"neutron-667d896bdd-mtmgs\" (UID: \"2e5e255c-5481-4fc1-937f-53014b9b1da3\") " pod="openstack/neutron-667d896bdd-mtmgs" Dec 03 09:12:52 crc kubenswrapper[4576]: I1203 09:12:52.198482 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rm6gc\" (UniqueName: \"kubernetes.io/projected/2e5e255c-5481-4fc1-937f-53014b9b1da3-kube-api-access-rm6gc\") pod \"neutron-667d896bdd-mtmgs\" (UID: \"2e5e255c-5481-4fc1-937f-53014b9b1da3\") " pod="openstack/neutron-667d896bdd-mtmgs" Dec 03 09:12:52 crc kubenswrapper[4576]: I1203 09:12:52.243295 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b7b667979-knzgm" Dec 03 09:12:52 crc kubenswrapper[4576]: I1203 09:12:52.270159 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-667d896bdd-mtmgs" Dec 03 09:12:54 crc kubenswrapper[4576]: I1203 09:12:54.154014 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-6f978d8b99-7d88p"] Dec 03 09:12:54 crc kubenswrapper[4576]: I1203 09:12:54.155740 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6f978d8b99-7d88p" Dec 03 09:12:54 crc kubenswrapper[4576]: I1203 09:12:54.157960 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Dec 03 09:12:54 crc kubenswrapper[4576]: I1203 09:12:54.158031 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Dec 03 09:12:54 crc kubenswrapper[4576]: I1203 09:12:54.183913 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k7swx\" (UniqueName: \"kubernetes.io/projected/cdfca795-30e9-4534-9084-e34e01ab71ae-kube-api-access-k7swx\") pod \"neutron-6f978d8b99-7d88p\" (UID: \"cdfca795-30e9-4534-9084-e34e01ab71ae\") " pod="openstack/neutron-6f978d8b99-7d88p" Dec 03 09:12:54 crc kubenswrapper[4576]: I1203 09:12:54.183983 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdfca795-30e9-4534-9084-e34e01ab71ae-combined-ca-bundle\") pod \"neutron-6f978d8b99-7d88p\" (UID: \"cdfca795-30e9-4534-9084-e34e01ab71ae\") " pod="openstack/neutron-6f978d8b99-7d88p" Dec 03 09:12:54 crc kubenswrapper[4576]: I1203 09:12:54.184015 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cdfca795-30e9-4534-9084-e34e01ab71ae-public-tls-certs\") pod \"neutron-6f978d8b99-7d88p\" (UID: \"cdfca795-30e9-4534-9084-e34e01ab71ae\") " pod="openstack/neutron-6f978d8b99-7d88p" Dec 03 09:12:54 crc kubenswrapper[4576]: I1203 09:12:54.184052 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/cdfca795-30e9-4534-9084-e34e01ab71ae-config\") pod \"neutron-6f978d8b99-7d88p\" (UID: \"cdfca795-30e9-4534-9084-e34e01ab71ae\") " pod="openstack/neutron-6f978d8b99-7d88p" Dec 03 09:12:54 crc kubenswrapper[4576]: I1203 09:12:54.184097 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/cdfca795-30e9-4534-9084-e34e01ab71ae-httpd-config\") pod \"neutron-6f978d8b99-7d88p\" (UID: \"cdfca795-30e9-4534-9084-e34e01ab71ae\") " pod="openstack/neutron-6f978d8b99-7d88p" Dec 03 09:12:54 crc kubenswrapper[4576]: I1203 09:12:54.184124 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cdfca795-30e9-4534-9084-e34e01ab71ae-ovndb-tls-certs\") pod \"neutron-6f978d8b99-7d88p\" (UID: \"cdfca795-30e9-4534-9084-e34e01ab71ae\") " pod="openstack/neutron-6f978d8b99-7d88p" Dec 03 09:12:54 crc kubenswrapper[4576]: I1203 09:12:54.184166 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cdfca795-30e9-4534-9084-e34e01ab71ae-internal-tls-certs\") pod \"neutron-6f978d8b99-7d88p\" (UID: \"cdfca795-30e9-4534-9084-e34e01ab71ae\") " pod="openstack/neutron-6f978d8b99-7d88p" Dec 03 09:12:54 
crc kubenswrapper[4576]: I1203 09:12:54.185719 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6f978d8b99-7d88p"] Dec 03 09:12:54 crc kubenswrapper[4576]: I1203 09:12:54.288897 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7swx\" (UniqueName: \"kubernetes.io/projected/cdfca795-30e9-4534-9084-e34e01ab71ae-kube-api-access-k7swx\") pod \"neutron-6f978d8b99-7d88p\" (UID: \"cdfca795-30e9-4534-9084-e34e01ab71ae\") " pod="openstack/neutron-6f978d8b99-7d88p" Dec 03 09:12:54 crc kubenswrapper[4576]: I1203 09:12:54.288966 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdfca795-30e9-4534-9084-e34e01ab71ae-combined-ca-bundle\") pod \"neutron-6f978d8b99-7d88p\" (UID: \"cdfca795-30e9-4534-9084-e34e01ab71ae\") " pod="openstack/neutron-6f978d8b99-7d88p" Dec 03 09:12:54 crc kubenswrapper[4576]: I1203 09:12:54.289007 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cdfca795-30e9-4534-9084-e34e01ab71ae-public-tls-certs\") pod \"neutron-6f978d8b99-7d88p\" (UID: \"cdfca795-30e9-4534-9084-e34e01ab71ae\") " pod="openstack/neutron-6f978d8b99-7d88p" Dec 03 09:12:54 crc kubenswrapper[4576]: I1203 09:12:54.289043 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/cdfca795-30e9-4534-9084-e34e01ab71ae-config\") pod \"neutron-6f978d8b99-7d88p\" (UID: \"cdfca795-30e9-4534-9084-e34e01ab71ae\") " pod="openstack/neutron-6f978d8b99-7d88p" Dec 03 09:12:54 crc kubenswrapper[4576]: I1203 09:12:54.289110 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/cdfca795-30e9-4534-9084-e34e01ab71ae-httpd-config\") pod \"neutron-6f978d8b99-7d88p\" (UID: \"cdfca795-30e9-4534-9084-e34e01ab71ae\") " pod="openstack/neutron-6f978d8b99-7d88p" Dec 03 09:12:54 crc kubenswrapper[4576]: I1203 09:12:54.289135 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cdfca795-30e9-4534-9084-e34e01ab71ae-ovndb-tls-certs\") pod \"neutron-6f978d8b99-7d88p\" (UID: \"cdfca795-30e9-4534-9084-e34e01ab71ae\") " pod="openstack/neutron-6f978d8b99-7d88p" Dec 03 09:12:54 crc kubenswrapper[4576]: I1203 09:12:54.289184 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cdfca795-30e9-4534-9084-e34e01ab71ae-internal-tls-certs\") pod \"neutron-6f978d8b99-7d88p\" (UID: \"cdfca795-30e9-4534-9084-e34e01ab71ae\") " pod="openstack/neutron-6f978d8b99-7d88p" Dec 03 09:12:54 crc kubenswrapper[4576]: I1203 09:12:54.304411 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdfca795-30e9-4534-9084-e34e01ab71ae-combined-ca-bundle\") pod \"neutron-6f978d8b99-7d88p\" (UID: \"cdfca795-30e9-4534-9084-e34e01ab71ae\") " pod="openstack/neutron-6f978d8b99-7d88p" Dec 03 09:12:54 crc kubenswrapper[4576]: I1203 09:12:54.304466 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cdfca795-30e9-4534-9084-e34e01ab71ae-ovndb-tls-certs\") pod \"neutron-6f978d8b99-7d88p\" (UID: \"cdfca795-30e9-4534-9084-e34e01ab71ae\") " 
pod="openstack/neutron-6f978d8b99-7d88p" Dec 03 09:12:54 crc kubenswrapper[4576]: I1203 09:12:54.305234 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/cdfca795-30e9-4534-9084-e34e01ab71ae-config\") pod \"neutron-6f978d8b99-7d88p\" (UID: \"cdfca795-30e9-4534-9084-e34e01ab71ae\") " pod="openstack/neutron-6f978d8b99-7d88p" Dec 03 09:12:54 crc kubenswrapper[4576]: I1203 09:12:54.310251 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cdfca795-30e9-4534-9084-e34e01ab71ae-public-tls-certs\") pod \"neutron-6f978d8b99-7d88p\" (UID: \"cdfca795-30e9-4534-9084-e34e01ab71ae\") " pod="openstack/neutron-6f978d8b99-7d88p" Dec 03 09:12:54 crc kubenswrapper[4576]: I1203 09:12:54.315174 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k7swx\" (UniqueName: \"kubernetes.io/projected/cdfca795-30e9-4534-9084-e34e01ab71ae-kube-api-access-k7swx\") pod \"neutron-6f978d8b99-7d88p\" (UID: \"cdfca795-30e9-4534-9084-e34e01ab71ae\") " pod="openstack/neutron-6f978d8b99-7d88p" Dec 03 09:12:54 crc kubenswrapper[4576]: I1203 09:12:54.319777 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/cdfca795-30e9-4534-9084-e34e01ab71ae-httpd-config\") pod \"neutron-6f978d8b99-7d88p\" (UID: \"cdfca795-30e9-4534-9084-e34e01ab71ae\") " pod="openstack/neutron-6f978d8b99-7d88p" Dec 03 09:12:54 crc kubenswrapper[4576]: I1203 09:12:54.319977 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 03 09:12:54 crc kubenswrapper[4576]: I1203 09:12:54.320015 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 03 09:12:54 crc kubenswrapper[4576]: I1203 09:12:54.328096 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cdfca795-30e9-4534-9084-e34e01ab71ae-internal-tls-certs\") pod \"neutron-6f978d8b99-7d88p\" (UID: \"cdfca795-30e9-4534-9084-e34e01ab71ae\") " pod="openstack/neutron-6f978d8b99-7d88p" Dec 03 09:12:54 crc kubenswrapper[4576]: I1203 09:12:54.454706 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 03 09:12:54 crc kubenswrapper[4576]: I1203 09:12:54.458376 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 03 09:12:54 crc kubenswrapper[4576]: I1203 09:12:54.476272 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6f978d8b99-7d88p" Dec 03 09:12:54 crc kubenswrapper[4576]: I1203 09:12:54.645471 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 03 09:12:54 crc kubenswrapper[4576]: I1203 09:12:54.645724 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 03 09:12:56 crc kubenswrapper[4576]: I1203 09:12:56.679673 4576 generic.go:334] "Generic (PLEG): container finished" podID="047a7c95-c4e3-46fa-8b1b-2a351992493e" containerID="46008041e9328f5c70ea5d61480b89615e431713d233332f8517865e11ec2e38" exitCode=0 Dec 03 09:12:56 crc kubenswrapper[4576]: I1203 09:12:56.679855 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-qqg9g" event={"ID":"047a7c95-c4e3-46fa-8b1b-2a351992493e","Type":"ContainerDied","Data":"46008041e9328f5c70ea5d61480b89615e431713d233332f8517865e11ec2e38"} Dec 03 09:12:56 crc kubenswrapper[4576]: I1203 09:12:56.721353 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 03 09:12:56 crc kubenswrapper[4576]: I1203 09:12:56.721410 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 03 09:12:56 crc kubenswrapper[4576]: I1203 09:12:56.779137 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 03 09:12:56 crc kubenswrapper[4576]: I1203 09:12:56.782683 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 03 09:12:57 crc kubenswrapper[4576]: I1203 09:12:57.697611 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 03 09:12:57 crc kubenswrapper[4576]: I1203 09:12:57.697939 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 03 09:12:58 crc kubenswrapper[4576]: I1203 09:12:58.259010 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 03 09:12:58 crc kubenswrapper[4576]: I1203 09:12:58.259534 4576 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 09:12:58 crc kubenswrapper[4576]: I1203 09:12:58.289509 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 03 09:12:59 crc kubenswrapper[4576]: I1203 09:12:59.164634 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-988np" Dec 03 09:12:59 crc kubenswrapper[4576]: I1203 09:12:59.288939 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/437286d3-1147-43ef-945f-8612d1610427-db-sync-config-data\") pod \"437286d3-1147-43ef-945f-8612d1610427\" (UID: \"437286d3-1147-43ef-945f-8612d1610427\") " Dec 03 09:12:59 crc kubenswrapper[4576]: I1203 09:12:59.289007 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nfqn6\" (UniqueName: \"kubernetes.io/projected/437286d3-1147-43ef-945f-8612d1610427-kube-api-access-nfqn6\") pod \"437286d3-1147-43ef-945f-8612d1610427\" (UID: \"437286d3-1147-43ef-945f-8612d1610427\") " Dec 03 09:12:59 crc kubenswrapper[4576]: I1203 09:12:59.289245 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/437286d3-1147-43ef-945f-8612d1610427-combined-ca-bundle\") pod \"437286d3-1147-43ef-945f-8612d1610427\" (UID: \"437286d3-1147-43ef-945f-8612d1610427\") " Dec 03 09:12:59 crc kubenswrapper[4576]: I1203 09:12:59.316723 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/437286d3-1147-43ef-945f-8612d1610427-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "437286d3-1147-43ef-945f-8612d1610427" (UID: "437286d3-1147-43ef-945f-8612d1610427"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:12:59 crc kubenswrapper[4576]: I1203 09:12:59.316823 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/437286d3-1147-43ef-945f-8612d1610427-kube-api-access-nfqn6" (OuterVolumeSpecName: "kube-api-access-nfqn6") pod "437286d3-1147-43ef-945f-8612d1610427" (UID: "437286d3-1147-43ef-945f-8612d1610427"). InnerVolumeSpecName "kube-api-access-nfqn6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:12:59 crc kubenswrapper[4576]: I1203 09:12:59.327834 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/437286d3-1147-43ef-945f-8612d1610427-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "437286d3-1147-43ef-945f-8612d1610427" (UID: "437286d3-1147-43ef-945f-8612d1610427"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:12:59 crc kubenswrapper[4576]: I1203 09:12:59.390818 4576 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/437286d3-1147-43ef-945f-8612d1610427-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:59 crc kubenswrapper[4576]: I1203 09:12:59.390853 4576 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/437286d3-1147-43ef-945f-8612d1610427-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:59 crc kubenswrapper[4576]: I1203 09:12:59.390865 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nfqn6\" (UniqueName: \"kubernetes.io/projected/437286d3-1147-43ef-945f-8612d1610427-kube-api-access-nfqn6\") on node \"crc\" DevicePath \"\"" Dec 03 09:12:59 crc kubenswrapper[4576]: I1203 09:12:59.738331 4576 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 09:12:59 crc kubenswrapper[4576]: I1203 09:12:59.738355 4576 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 09:12:59 crc kubenswrapper[4576]: I1203 09:12:59.739458 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-988np" Dec 03 09:12:59 crc kubenswrapper[4576]: I1203 09:12:59.739827 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-988np" event={"ID":"437286d3-1147-43ef-945f-8612d1610427","Type":"ContainerDied","Data":"d004bca179190e860d0d1178fc26dce66f7ec5eb8f1a5dbb24451b09daee78e9"} Dec 03 09:12:59 crc kubenswrapper[4576]: I1203 09:12:59.739878 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d004bca179190e860d0d1178fc26dce66f7ec5eb8f1a5dbb24451b09daee78e9" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.187264 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.522898 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-7678fb6bf-m8m4k"] Dec 03 09:13:00 crc kubenswrapper[4576]: E1203 09:13:00.523693 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="437286d3-1147-43ef-945f-8612d1610427" containerName="barbican-db-sync" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.523722 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="437286d3-1147-43ef-945f-8612d1610427" containerName="barbican-db-sync" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.523958 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="437286d3-1147-43ef-945f-8612d1610427" containerName="barbican-db-sync" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.529506 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-7678fb6bf-m8m4k" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.546949 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.547254 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-s6zfp" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.547440 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.593591 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-7678fb6bf-m8m4k"] Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.624809 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e8e42590-8fdb-4c05-a5e1-e2cddbeb0731-logs\") pod \"barbican-worker-7678fb6bf-m8m4k\" (UID: \"e8e42590-8fdb-4c05-a5e1-e2cddbeb0731\") " pod="openstack/barbican-worker-7678fb6bf-m8m4k" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.624896 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e8e42590-8fdb-4c05-a5e1-e2cddbeb0731-config-data-custom\") pod \"barbican-worker-7678fb6bf-m8m4k\" (UID: \"e8e42590-8fdb-4c05-a5e1-e2cddbeb0731\") " pod="openstack/barbican-worker-7678fb6bf-m8m4k" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.624933 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jpkw2\" (UniqueName: \"kubernetes.io/projected/e8e42590-8fdb-4c05-a5e1-e2cddbeb0731-kube-api-access-jpkw2\") pod \"barbican-worker-7678fb6bf-m8m4k\" (UID: \"e8e42590-8fdb-4c05-a5e1-e2cddbeb0731\") " pod="openstack/barbican-worker-7678fb6bf-m8m4k" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.624966 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8e42590-8fdb-4c05-a5e1-e2cddbeb0731-combined-ca-bundle\") pod \"barbican-worker-7678fb6bf-m8m4k\" (UID: \"e8e42590-8fdb-4c05-a5e1-e2cddbeb0731\") " pod="openstack/barbican-worker-7678fb6bf-m8m4k" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.625003 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8e42590-8fdb-4c05-a5e1-e2cddbeb0731-config-data\") pod \"barbican-worker-7678fb6bf-m8m4k\" (UID: \"e8e42590-8fdb-4c05-a5e1-e2cddbeb0731\") " pod="openstack/barbican-worker-7678fb6bf-m8m4k" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.647151 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-9f4d5dcf8-5lvz8"] Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.649161 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-9f4d5dcf8-5lvz8" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.654170 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.688125 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-qqg9g" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.696625 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-9f4d5dcf8-5lvz8"] Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.727058 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/047a7c95-c4e3-46fa-8b1b-2a351992493e-scripts\") pod \"047a7c95-c4e3-46fa-8b1b-2a351992493e\" (UID: \"047a7c95-c4e3-46fa-8b1b-2a351992493e\") " Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.728301 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h9njg\" (UniqueName: \"kubernetes.io/projected/047a7c95-c4e3-46fa-8b1b-2a351992493e-kube-api-access-h9njg\") pod \"047a7c95-c4e3-46fa-8b1b-2a351992493e\" (UID: \"047a7c95-c4e3-46fa-8b1b-2a351992493e\") " Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.740676 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/047a7c95-c4e3-46fa-8b1b-2a351992493e-combined-ca-bundle\") pod \"047a7c95-c4e3-46fa-8b1b-2a351992493e\" (UID: \"047a7c95-c4e3-46fa-8b1b-2a351992493e\") " Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.740783 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/047a7c95-c4e3-46fa-8b1b-2a351992493e-etc-machine-id\") pod \"047a7c95-c4e3-46fa-8b1b-2a351992493e\" (UID: \"047a7c95-c4e3-46fa-8b1b-2a351992493e\") " Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.740849 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/047a7c95-c4e3-46fa-8b1b-2a351992493e-config-data\") pod \"047a7c95-c4e3-46fa-8b1b-2a351992493e\" (UID: \"047a7c95-c4e3-46fa-8b1b-2a351992493e\") " Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.740909 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/047a7c95-c4e3-46fa-8b1b-2a351992493e-db-sync-config-data\") pod \"047a7c95-c4e3-46fa-8b1b-2a351992493e\" (UID: \"047a7c95-c4e3-46fa-8b1b-2a351992493e\") " Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.741143 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/14ee4879-18ef-4d7f-956f-03297ff160fe-config-data-custom\") pod \"barbican-keystone-listener-9f4d5dcf8-5lvz8\" (UID: \"14ee4879-18ef-4d7f-956f-03297ff160fe\") " pod="openstack/barbican-keystone-listener-9f4d5dcf8-5lvz8" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.741188 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e8e42590-8fdb-4c05-a5e1-e2cddbeb0731-config-data-custom\") pod \"barbican-worker-7678fb6bf-m8m4k\" (UID: \"e8e42590-8fdb-4c05-a5e1-e2cddbeb0731\") " pod="openstack/barbican-worker-7678fb6bf-m8m4k" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.741214 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jpkw2\" (UniqueName: \"kubernetes.io/projected/e8e42590-8fdb-4c05-a5e1-e2cddbeb0731-kube-api-access-jpkw2\") pod \"barbican-worker-7678fb6bf-m8m4k\" (UID: 
\"e8e42590-8fdb-4c05-a5e1-e2cddbeb0731\") " pod="openstack/barbican-worker-7678fb6bf-m8m4k" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.741257 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8e42590-8fdb-4c05-a5e1-e2cddbeb0731-combined-ca-bundle\") pod \"barbican-worker-7678fb6bf-m8m4k\" (UID: \"e8e42590-8fdb-4c05-a5e1-e2cddbeb0731\") " pod="openstack/barbican-worker-7678fb6bf-m8m4k" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.741319 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8e42590-8fdb-4c05-a5e1-e2cddbeb0731-config-data\") pod \"barbican-worker-7678fb6bf-m8m4k\" (UID: \"e8e42590-8fdb-4c05-a5e1-e2cddbeb0731\") " pod="openstack/barbican-worker-7678fb6bf-m8m4k" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.741449 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l4v74\" (UniqueName: \"kubernetes.io/projected/14ee4879-18ef-4d7f-956f-03297ff160fe-kube-api-access-l4v74\") pod \"barbican-keystone-listener-9f4d5dcf8-5lvz8\" (UID: \"14ee4879-18ef-4d7f-956f-03297ff160fe\") " pod="openstack/barbican-keystone-listener-9f4d5dcf8-5lvz8" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.741483 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14ee4879-18ef-4d7f-956f-03297ff160fe-combined-ca-bundle\") pod \"barbican-keystone-listener-9f4d5dcf8-5lvz8\" (UID: \"14ee4879-18ef-4d7f-956f-03297ff160fe\") " pod="openstack/barbican-keystone-listener-9f4d5dcf8-5lvz8" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.741613 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e8e42590-8fdb-4c05-a5e1-e2cddbeb0731-logs\") pod \"barbican-worker-7678fb6bf-m8m4k\" (UID: \"e8e42590-8fdb-4c05-a5e1-e2cddbeb0731\") " pod="openstack/barbican-worker-7678fb6bf-m8m4k" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.741644 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14ee4879-18ef-4d7f-956f-03297ff160fe-config-data\") pod \"barbican-keystone-listener-9f4d5dcf8-5lvz8\" (UID: \"14ee4879-18ef-4d7f-956f-03297ff160fe\") " pod="openstack/barbican-keystone-listener-9f4d5dcf8-5lvz8" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.741667 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14ee4879-18ef-4d7f-956f-03297ff160fe-logs\") pod \"barbican-keystone-listener-9f4d5dcf8-5lvz8\" (UID: \"14ee4879-18ef-4d7f-956f-03297ff160fe\") " pod="openstack/barbican-keystone-listener-9f4d5dcf8-5lvz8" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.747011 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e8e42590-8fdb-4c05-a5e1-e2cddbeb0731-logs\") pod \"barbican-worker-7678fb6bf-m8m4k\" (UID: \"e8e42590-8fdb-4c05-a5e1-e2cddbeb0731\") " pod="openstack/barbican-worker-7678fb6bf-m8m4k" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.754311 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/047a7c95-c4e3-46fa-8b1b-2a351992493e-kube-api-access-h9njg" (OuterVolumeSpecName: "kube-api-access-h9njg") pod "047a7c95-c4e3-46fa-8b1b-2a351992493e" (UID: "047a7c95-c4e3-46fa-8b1b-2a351992493e"). InnerVolumeSpecName "kube-api-access-h9njg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.754506 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/047a7c95-c4e3-46fa-8b1b-2a351992493e-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "047a7c95-c4e3-46fa-8b1b-2a351992493e" (UID: "047a7c95-c4e3-46fa-8b1b-2a351992493e"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.754574 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/047a7c95-c4e3-46fa-8b1b-2a351992493e-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "047a7c95-c4e3-46fa-8b1b-2a351992493e" (UID: "047a7c95-c4e3-46fa-8b1b-2a351992493e"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.785669 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e8e42590-8fdb-4c05-a5e1-e2cddbeb0731-config-data-custom\") pod \"barbican-worker-7678fb6bf-m8m4k\" (UID: \"e8e42590-8fdb-4c05-a5e1-e2cddbeb0731\") " pod="openstack/barbican-worker-7678fb6bf-m8m4k" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.790070 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jpkw2\" (UniqueName: \"kubernetes.io/projected/e8e42590-8fdb-4c05-a5e1-e2cddbeb0731-kube-api-access-jpkw2\") pod \"barbican-worker-7678fb6bf-m8m4k\" (UID: \"e8e42590-8fdb-4c05-a5e1-e2cddbeb0731\") " pod="openstack/barbican-worker-7678fb6bf-m8m4k" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.794855 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/047a7c95-c4e3-46fa-8b1b-2a351992493e-scripts" (OuterVolumeSpecName: "scripts") pod "047a7c95-c4e3-46fa-8b1b-2a351992493e" (UID: "047a7c95-c4e3-46fa-8b1b-2a351992493e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.813378 4576 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.814159 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-qqg9g" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.814314 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-qqg9g" event={"ID":"047a7c95-c4e3-46fa-8b1b-2a351992493e","Type":"ContainerDied","Data":"e264ec8476c2c8c9724caf9f0db8d94f54cb9499b0cbfc9a38a01a1108461053"} Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.814333 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e264ec8476c2c8c9724caf9f0db8d94f54cb9499b0cbfc9a38a01a1108461053" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.841264 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8e42590-8fdb-4c05-a5e1-e2cddbeb0731-combined-ca-bundle\") pod \"barbican-worker-7678fb6bf-m8m4k\" (UID: \"e8e42590-8fdb-4c05-a5e1-e2cddbeb0731\") " pod="openstack/barbican-worker-7678fb6bf-m8m4k" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.842629 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l4v74\" (UniqueName: \"kubernetes.io/projected/14ee4879-18ef-4d7f-956f-03297ff160fe-kube-api-access-l4v74\") pod \"barbican-keystone-listener-9f4d5dcf8-5lvz8\" (UID: \"14ee4879-18ef-4d7f-956f-03297ff160fe\") " pod="openstack/barbican-keystone-listener-9f4d5dcf8-5lvz8" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.842659 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14ee4879-18ef-4d7f-956f-03297ff160fe-combined-ca-bundle\") pod \"barbican-keystone-listener-9f4d5dcf8-5lvz8\" (UID: \"14ee4879-18ef-4d7f-956f-03297ff160fe\") " pod="openstack/barbican-keystone-listener-9f4d5dcf8-5lvz8" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.842724 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14ee4879-18ef-4d7f-956f-03297ff160fe-config-data\") pod \"barbican-keystone-listener-9f4d5dcf8-5lvz8\" (UID: \"14ee4879-18ef-4d7f-956f-03297ff160fe\") " pod="openstack/barbican-keystone-listener-9f4d5dcf8-5lvz8" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.842754 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14ee4879-18ef-4d7f-956f-03297ff160fe-logs\") pod \"barbican-keystone-listener-9f4d5dcf8-5lvz8\" (UID: \"14ee4879-18ef-4d7f-956f-03297ff160fe\") " pod="openstack/barbican-keystone-listener-9f4d5dcf8-5lvz8" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.842777 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/14ee4879-18ef-4d7f-956f-03297ff160fe-config-data-custom\") pod \"barbican-keystone-listener-9f4d5dcf8-5lvz8\" (UID: \"14ee4879-18ef-4d7f-956f-03297ff160fe\") " pod="openstack/barbican-keystone-listener-9f4d5dcf8-5lvz8" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.842903 4576 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/047a7c95-c4e3-46fa-8b1b-2a351992493e-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.842915 4576 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: 
\"kubernetes.io/secret/047a7c95-c4e3-46fa-8b1b-2a351992493e-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.842924 4576 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/047a7c95-c4e3-46fa-8b1b-2a351992493e-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.842933 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h9njg\" (UniqueName: \"kubernetes.io/projected/047a7c95-c4e3-46fa-8b1b-2a351992493e-kube-api-access-h9njg\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.848065 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8e42590-8fdb-4c05-a5e1-e2cddbeb0731-config-data\") pod \"barbican-worker-7678fb6bf-m8m4k\" (UID: \"e8e42590-8fdb-4c05-a5e1-e2cddbeb0731\") " pod="openstack/barbican-worker-7678fb6bf-m8m4k" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.853366 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14ee4879-18ef-4d7f-956f-03297ff160fe-logs\") pod \"barbican-keystone-listener-9f4d5dcf8-5lvz8\" (UID: \"14ee4879-18ef-4d7f-956f-03297ff160fe\") " pod="openstack/barbican-keystone-listener-9f4d5dcf8-5lvz8" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.861333 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14ee4879-18ef-4d7f-956f-03297ff160fe-combined-ca-bundle\") pod \"barbican-keystone-listener-9f4d5dcf8-5lvz8\" (UID: \"14ee4879-18ef-4d7f-956f-03297ff160fe\") " pod="openstack/barbican-keystone-listener-9f4d5dcf8-5lvz8" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.873059 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/14ee4879-18ef-4d7f-956f-03297ff160fe-config-data-custom\") pod \"barbican-keystone-listener-9f4d5dcf8-5lvz8\" (UID: \"14ee4879-18ef-4d7f-956f-03297ff160fe\") " pod="openstack/barbican-keystone-listener-9f4d5dcf8-5lvz8" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.885057 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-knzgm"] Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.885394 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l4v74\" (UniqueName: \"kubernetes.io/projected/14ee4879-18ef-4d7f-956f-03297ff160fe-kube-api-access-l4v74\") pod \"barbican-keystone-listener-9f4d5dcf8-5lvz8\" (UID: \"14ee4879-18ef-4d7f-956f-03297ff160fe\") " pod="openstack/barbican-keystone-listener-9f4d5dcf8-5lvz8" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.894702 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-nv5kc"] Dec 03 09:13:00 crc kubenswrapper[4576]: E1203 09:13:00.895140 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="047a7c95-c4e3-46fa-8b1b-2a351992493e" containerName="cinder-db-sync" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.895158 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="047a7c95-c4e3-46fa-8b1b-2a351992493e" containerName="cinder-db-sync" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.895346 4576 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="047a7c95-c4e3-46fa-8b1b-2a351992493e" containerName="cinder-db-sync" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.896010 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14ee4879-18ef-4d7f-956f-03297ff160fe-config-data\") pod \"barbican-keystone-listener-9f4d5dcf8-5lvz8\" (UID: \"14ee4879-18ef-4d7f-956f-03297ff160fe\") " pod="openstack/barbican-keystone-listener-9f4d5dcf8-5lvz8" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.896310 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-848cf88cfc-nv5kc" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.901668 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/047a7c95-c4e3-46fa-8b1b-2a351992493e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "047a7c95-c4e3-46fa-8b1b-2a351992493e" (UID: "047a7c95-c4e3-46fa-8b1b-2a351992493e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.929797 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/047a7c95-c4e3-46fa-8b1b-2a351992493e-config-data" (OuterVolumeSpecName: "config-data") pod "047a7c95-c4e3-46fa-8b1b-2a351992493e" (UID: "047a7c95-c4e3-46fa-8b1b-2a351992493e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.930341 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-nv5kc"] Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.982435 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/887afe2d-492c-43ac-a169-fc24f09da194-ovsdbserver-sb\") pod \"dnsmasq-dns-848cf88cfc-nv5kc\" (UID: \"887afe2d-492c-43ac-a169-fc24f09da194\") " pod="openstack/dnsmasq-dns-848cf88cfc-nv5kc" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.982484 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/887afe2d-492c-43ac-a169-fc24f09da194-dns-swift-storage-0\") pod \"dnsmasq-dns-848cf88cfc-nv5kc\" (UID: \"887afe2d-492c-43ac-a169-fc24f09da194\") " pod="openstack/dnsmasq-dns-848cf88cfc-nv5kc" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.982519 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/887afe2d-492c-43ac-a169-fc24f09da194-config\") pod \"dnsmasq-dns-848cf88cfc-nv5kc\" (UID: \"887afe2d-492c-43ac-a169-fc24f09da194\") " pod="openstack/dnsmasq-dns-848cf88cfc-nv5kc" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.982619 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/887afe2d-492c-43ac-a169-fc24f09da194-dns-svc\") pod \"dnsmasq-dns-848cf88cfc-nv5kc\" (UID: \"887afe2d-492c-43ac-a169-fc24f09da194\") " pod="openstack/dnsmasq-dns-848cf88cfc-nv5kc" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.982795 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/887afe2d-492c-43ac-a169-fc24f09da194-ovsdbserver-nb\") pod \"dnsmasq-dns-848cf88cfc-nv5kc\" (UID: \"887afe2d-492c-43ac-a169-fc24f09da194\") " pod="openstack/dnsmasq-dns-848cf88cfc-nv5kc" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.982825 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zvqbf\" (UniqueName: \"kubernetes.io/projected/887afe2d-492c-43ac-a169-fc24f09da194-kube-api-access-zvqbf\") pod \"dnsmasq-dns-848cf88cfc-nv5kc\" (UID: \"887afe2d-492c-43ac-a169-fc24f09da194\") " pod="openstack/dnsmasq-dns-848cf88cfc-nv5kc" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.982907 4576 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/047a7c95-c4e3-46fa-8b1b-2a351992493e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.982926 4576 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/047a7c95-c4e3-46fa-8b1b-2a351992493e-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:00 crc kubenswrapper[4576]: I1203 09:13:00.991786 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.014621 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-75c4c4f78-4f2pp"] Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.016514 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-75c4c4f78-4f2pp" Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.020451 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-7678fb6bf-m8m4k" Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.021047 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.033953 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-75c4c4f78-4f2pp"] Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.064515 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-9f4d5dcf8-5lvz8" Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.084038 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/114a8b03-2981-44ce-84e0-6ee5d96ab0b6-config-data-custom\") pod \"barbican-api-75c4c4f78-4f2pp\" (UID: \"114a8b03-2981-44ce-84e0-6ee5d96ab0b6\") " pod="openstack/barbican-api-75c4c4f78-4f2pp" Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.084079 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zs9s6\" (UniqueName: \"kubernetes.io/projected/114a8b03-2981-44ce-84e0-6ee5d96ab0b6-kube-api-access-zs9s6\") pod \"barbican-api-75c4c4f78-4f2pp\" (UID: \"114a8b03-2981-44ce-84e0-6ee5d96ab0b6\") " pod="openstack/barbican-api-75c4c4f78-4f2pp" Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.084128 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/114a8b03-2981-44ce-84e0-6ee5d96ab0b6-logs\") pod \"barbican-api-75c4c4f78-4f2pp\" (UID: \"114a8b03-2981-44ce-84e0-6ee5d96ab0b6\") " pod="openstack/barbican-api-75c4c4f78-4f2pp" Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.084150 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/887afe2d-492c-43ac-a169-fc24f09da194-ovsdbserver-nb\") pod \"dnsmasq-dns-848cf88cfc-nv5kc\" (UID: \"887afe2d-492c-43ac-a169-fc24f09da194\") " pod="openstack/dnsmasq-dns-848cf88cfc-nv5kc" Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.084171 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zvqbf\" (UniqueName: \"kubernetes.io/projected/887afe2d-492c-43ac-a169-fc24f09da194-kube-api-access-zvqbf\") pod \"dnsmasq-dns-848cf88cfc-nv5kc\" (UID: \"887afe2d-492c-43ac-a169-fc24f09da194\") " pod="openstack/dnsmasq-dns-848cf88cfc-nv5kc" Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.084235 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/887afe2d-492c-43ac-a169-fc24f09da194-ovsdbserver-sb\") pod \"dnsmasq-dns-848cf88cfc-nv5kc\" (UID: \"887afe2d-492c-43ac-a169-fc24f09da194\") " pod="openstack/dnsmasq-dns-848cf88cfc-nv5kc" Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.084250 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/887afe2d-492c-43ac-a169-fc24f09da194-dns-swift-storage-0\") pod \"dnsmasq-dns-848cf88cfc-nv5kc\" (UID: \"887afe2d-492c-43ac-a169-fc24f09da194\") " pod="openstack/dnsmasq-dns-848cf88cfc-nv5kc" Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.084272 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/887afe2d-492c-43ac-a169-fc24f09da194-config\") pod \"dnsmasq-dns-848cf88cfc-nv5kc\" (UID: \"887afe2d-492c-43ac-a169-fc24f09da194\") " pod="openstack/dnsmasq-dns-848cf88cfc-nv5kc" Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.084322 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/887afe2d-492c-43ac-a169-fc24f09da194-dns-svc\") pod \"dnsmasq-dns-848cf88cfc-nv5kc\" (UID: 
\"887afe2d-492c-43ac-a169-fc24f09da194\") " pod="openstack/dnsmasq-dns-848cf88cfc-nv5kc" Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.084339 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/114a8b03-2981-44ce-84e0-6ee5d96ab0b6-combined-ca-bundle\") pod \"barbican-api-75c4c4f78-4f2pp\" (UID: \"114a8b03-2981-44ce-84e0-6ee5d96ab0b6\") " pod="openstack/barbican-api-75c4c4f78-4f2pp" Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.084359 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/114a8b03-2981-44ce-84e0-6ee5d96ab0b6-config-data\") pod \"barbican-api-75c4c4f78-4f2pp\" (UID: \"114a8b03-2981-44ce-84e0-6ee5d96ab0b6\") " pod="openstack/barbican-api-75c4c4f78-4f2pp" Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.085145 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/887afe2d-492c-43ac-a169-fc24f09da194-ovsdbserver-nb\") pod \"dnsmasq-dns-848cf88cfc-nv5kc\" (UID: \"887afe2d-492c-43ac-a169-fc24f09da194\") " pod="openstack/dnsmasq-dns-848cf88cfc-nv5kc" Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.085171 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/887afe2d-492c-43ac-a169-fc24f09da194-ovsdbserver-sb\") pod \"dnsmasq-dns-848cf88cfc-nv5kc\" (UID: \"887afe2d-492c-43ac-a169-fc24f09da194\") " pod="openstack/dnsmasq-dns-848cf88cfc-nv5kc" Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.087274 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/887afe2d-492c-43ac-a169-fc24f09da194-config\") pod \"dnsmasq-dns-848cf88cfc-nv5kc\" (UID: \"887afe2d-492c-43ac-a169-fc24f09da194\") " pod="openstack/dnsmasq-dns-848cf88cfc-nv5kc" Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.087465 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/887afe2d-492c-43ac-a169-fc24f09da194-dns-svc\") pod \"dnsmasq-dns-848cf88cfc-nv5kc\" (UID: \"887afe2d-492c-43ac-a169-fc24f09da194\") " pod="openstack/dnsmasq-dns-848cf88cfc-nv5kc" Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.087725 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/887afe2d-492c-43ac-a169-fc24f09da194-dns-swift-storage-0\") pod \"dnsmasq-dns-848cf88cfc-nv5kc\" (UID: \"887afe2d-492c-43ac-a169-fc24f09da194\") " pod="openstack/dnsmasq-dns-848cf88cfc-nv5kc" Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.122720 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zvqbf\" (UniqueName: \"kubernetes.io/projected/887afe2d-492c-43ac-a169-fc24f09da194-kube-api-access-zvqbf\") pod \"dnsmasq-dns-848cf88cfc-nv5kc\" (UID: \"887afe2d-492c-43ac-a169-fc24f09da194\") " pod="openstack/dnsmasq-dns-848cf88cfc-nv5kc" Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.186119 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/114a8b03-2981-44ce-84e0-6ee5d96ab0b6-config-data-custom\") pod \"barbican-api-75c4c4f78-4f2pp\" (UID: \"114a8b03-2981-44ce-84e0-6ee5d96ab0b6\") " 
pod="openstack/barbican-api-75c4c4f78-4f2pp" Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.187483 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zs9s6\" (UniqueName: \"kubernetes.io/projected/114a8b03-2981-44ce-84e0-6ee5d96ab0b6-kube-api-access-zs9s6\") pod \"barbican-api-75c4c4f78-4f2pp\" (UID: \"114a8b03-2981-44ce-84e0-6ee5d96ab0b6\") " pod="openstack/barbican-api-75c4c4f78-4f2pp" Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.187568 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/114a8b03-2981-44ce-84e0-6ee5d96ab0b6-logs\") pod \"barbican-api-75c4c4f78-4f2pp\" (UID: \"114a8b03-2981-44ce-84e0-6ee5d96ab0b6\") " pod="openstack/barbican-api-75c4c4f78-4f2pp" Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.187679 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/114a8b03-2981-44ce-84e0-6ee5d96ab0b6-combined-ca-bundle\") pod \"barbican-api-75c4c4f78-4f2pp\" (UID: \"114a8b03-2981-44ce-84e0-6ee5d96ab0b6\") " pod="openstack/barbican-api-75c4c4f78-4f2pp" Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.187702 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/114a8b03-2981-44ce-84e0-6ee5d96ab0b6-config-data\") pod \"barbican-api-75c4c4f78-4f2pp\" (UID: \"114a8b03-2981-44ce-84e0-6ee5d96ab0b6\") " pod="openstack/barbican-api-75c4c4f78-4f2pp" Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.188071 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/114a8b03-2981-44ce-84e0-6ee5d96ab0b6-logs\") pod \"barbican-api-75c4c4f78-4f2pp\" (UID: \"114a8b03-2981-44ce-84e0-6ee5d96ab0b6\") " pod="openstack/barbican-api-75c4c4f78-4f2pp" Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.196879 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/114a8b03-2981-44ce-84e0-6ee5d96ab0b6-combined-ca-bundle\") pod \"barbican-api-75c4c4f78-4f2pp\" (UID: \"114a8b03-2981-44ce-84e0-6ee5d96ab0b6\") " pod="openstack/barbican-api-75c4c4f78-4f2pp" Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.197642 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/114a8b03-2981-44ce-84e0-6ee5d96ab0b6-config-data\") pod \"barbican-api-75c4c4f78-4f2pp\" (UID: \"114a8b03-2981-44ce-84e0-6ee5d96ab0b6\") " pod="openstack/barbican-api-75c4c4f78-4f2pp" Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.227693 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zs9s6\" (UniqueName: \"kubernetes.io/projected/114a8b03-2981-44ce-84e0-6ee5d96ab0b6-kube-api-access-zs9s6\") pod \"barbican-api-75c4c4f78-4f2pp\" (UID: \"114a8b03-2981-44ce-84e0-6ee5d96ab0b6\") " pod="openstack/barbican-api-75c4c4f78-4f2pp" Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.233212 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/114a8b03-2981-44ce-84e0-6ee5d96ab0b6-config-data-custom\") pod \"barbican-api-75c4c4f78-4f2pp\" (UID: \"114a8b03-2981-44ce-84e0-6ee5d96ab0b6\") " pod="openstack/barbican-api-75c4c4f78-4f2pp" Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.246896 
4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-848cf88cfc-nv5kc" Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.368618 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-75c4c4f78-4f2pp" Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.385952 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-knzgm"] Dec 03 09:13:01 crc kubenswrapper[4576]: W1203 09:13:01.403983 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2daadf8d_95a3_4485_9f3e_105c6600c366.slice/crio-22b6c5e392e4c2ad379428bf234c261939fd1f825b013e2b3be7291c7378b61c WatchSource:0}: Error finding container 22b6c5e392e4c2ad379428bf234c261939fd1f825b013e2b3be7291c7378b61c: Status 404 returned error can't find the container with id 22b6c5e392e4c2ad379428bf234c261939fd1f825b013e2b3be7291c7378b61c Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.551319 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-7678fb6bf-m8m4k"] Dec 03 09:13:01 crc kubenswrapper[4576]: W1203 09:13:01.620695 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode8e42590_8fdb_4c05_a5e1_e2cddbeb0731.slice/crio-5dae7a24fd9b3f4dcb6656cc601ae0d0f13cae8c6bbe1047601868b3b3b3d746 WatchSource:0}: Error finding container 5dae7a24fd9b3f4dcb6656cc601ae0d0f13cae8c6bbe1047601868b3b3b3d746: Status 404 returned error can't find the container with id 5dae7a24fd9b3f4dcb6656cc601ae0d0f13cae8c6bbe1047601868b3b3b3d746 Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.762162 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-5d9b9454d4-cbqlk" podUID="83ab6db2-7b9e-4161-a064-56fe67986825" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.144:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.144:8443: connect: connection refused" Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.762472 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-5d9b9454d4-cbqlk" Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.763222 4576 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="horizon" containerStatusID={"Type":"cri-o","ID":"ecc1d0da0e23f836b4057d594436f8f67b6ca64b352fcb8fb4eef6e69fd70084"} pod="openstack/horizon-5d9b9454d4-cbqlk" containerMessage="Container horizon failed startup probe, will be restarted" Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.763255 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-5d9b9454d4-cbqlk" podUID="83ab6db2-7b9e-4161-a064-56fe67986825" containerName="horizon" containerID="cri-o://ecc1d0da0e23f836b4057d594436f8f67b6ca64b352fcb8fb4eef6e69fd70084" gracePeriod=30 Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.805210 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-667d896bdd-mtmgs"] Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.854342 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b5fe3155-d737-4a7e-9596-a6080cae4b27" containerName="ceilometer-central-agent" containerID="cri-o://e664fb6cd75f081387fcb211ce9d76ee8303303cfcff773bb2e57ddfe65e3642" gracePeriod=30 Dec 03 09:13:01 crc 
kubenswrapper[4576]: I1203 09:13:01.854824 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b5fe3155-d737-4a7e-9596-a6080cae4b27","Type":"ContainerStarted","Data":"ae0a04d232d12bf4257ec0e2410deacdb7cc54eae8b0d752239f5061e2baf8b8"} Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.867862 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.855037 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b5fe3155-d737-4a7e-9596-a6080cae4b27" containerName="sg-core" containerID="cri-o://1902fc43fcd456aa6d39a99c52b62c6e954722493df5faeacabc5d4d0f9e6e1b" gracePeriod=30 Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.855014 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b5fe3155-d737-4a7e-9596-a6080cae4b27" containerName="ceilometer-notification-agent" containerID="cri-o://32b57303e9fb81ccf028911c977c5217cead677e6f4bbdef90a0052653a5853f" gracePeriod=30 Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.855009 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b5fe3155-d737-4a7e-9596-a6080cae4b27" containerName="proxy-httpd" containerID="cri-o://ae0a04d232d12bf4257ec0e2410deacdb7cc54eae8b0d752239f5061e2baf8b8" gracePeriod=30 Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.907206 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-667d896bdd-mtmgs" event={"ID":"2e5e255c-5481-4fc1-937f-53014b9b1da3","Type":"ContainerStarted","Data":"eb2ce85096e5add7053d4c6a8b10d2b7de4597750b3b8241e8ffb3c63b93b2ff"} Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.910161 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-6d649695d8-6rtxn" podUID="288ed488-5270-4966-b866-f9f015262989" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.910253 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-6d649695d8-6rtxn" Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.911983 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7b667979-knzgm" event={"ID":"2daadf8d-95a3-4485-9f3e-105c6600c366","Type":"ContainerStarted","Data":"22b6c5e392e4c2ad379428bf234c261939fd1f825b013e2b3be7291c7378b61c"} Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.915130 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7678fb6bf-m8m4k" event={"ID":"e8e42590-8fdb-4c05-a5e1-e2cddbeb0731","Type":"ContainerStarted","Data":"5dae7a24fd9b3f4dcb6656cc601ae0d0f13cae8c6bbe1047601868b3b3b3d746"} Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.917327 4576 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="horizon" containerStatusID={"Type":"cri-o","ID":"bfc5b586bd45522f00431a1d74aa16840b99866538d2111ba820658948c181a3"} pod="openstack/horizon-6d649695d8-6rtxn" containerMessage="Container horizon failed startup probe, will be restarted" Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.917396 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6d649695d8-6rtxn" 
podUID="288ed488-5270-4966-b866-f9f015262989" containerName="horizon" containerID="cri-o://bfc5b586bd45522f00431a1d74aa16840b99866538d2111ba820658948c181a3" gracePeriod=30 Dec 03 09:13:01 crc kubenswrapper[4576]: I1203 09:13:01.974440 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.762897611 podStartE2EDuration="1m30.974413451s" podCreationTimestamp="2025-12-03 09:11:31 +0000 UTC" firstStartedPulling="2025-12-03 09:11:33.450453622 +0000 UTC m=+1900.836430606" lastFinishedPulling="2025-12-03 09:13:00.661969462 +0000 UTC m=+1988.047946446" observedRunningTime="2025-12-03 09:13:01.90805611 +0000 UTC m=+1989.294033094" watchObservedRunningTime="2025-12-03 09:13:01.974413451 +0000 UTC m=+1989.360390435" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.213957 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.226159 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.230074 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.230311 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-rb52r" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.230426 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.231339 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.235546 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.325166 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9e725c45-22cc-4be1-8cdc-554f9af8a653-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"9e725c45-22cc-4be1-8cdc-554f9af8a653\") " pod="openstack/cinder-scheduler-0" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.325248 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e725c45-22cc-4be1-8cdc-554f9af8a653-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"9e725c45-22cc-4be1-8cdc-554f9af8a653\") " pod="openstack/cinder-scheduler-0" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.325282 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e725c45-22cc-4be1-8cdc-554f9af8a653-scripts\") pod \"cinder-scheduler-0\" (UID: \"9e725c45-22cc-4be1-8cdc-554f9af8a653\") " pod="openstack/cinder-scheduler-0" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.325321 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9e725c45-22cc-4be1-8cdc-554f9af8a653-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"9e725c45-22cc-4be1-8cdc-554f9af8a653\") " pod="openstack/cinder-scheduler-0" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.325350 4576 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-95cxm\" (UniqueName: \"kubernetes.io/projected/9e725c45-22cc-4be1-8cdc-554f9af8a653-kube-api-access-95cxm\") pod \"cinder-scheduler-0\" (UID: \"9e725c45-22cc-4be1-8cdc-554f9af8a653\") " pod="openstack/cinder-scheduler-0" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.325374 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e725c45-22cc-4be1-8cdc-554f9af8a653-config-data\") pod \"cinder-scheduler-0\" (UID: \"9e725c45-22cc-4be1-8cdc-554f9af8a653\") " pod="openstack/cinder-scheduler-0" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.371867 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-nv5kc"] Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.381568 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-9f4d5dcf8-5lvz8"] Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.406260 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-nv5kc"] Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.417065 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-9v8tt"] Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.426487 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e725c45-22cc-4be1-8cdc-554f9af8a653-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"9e725c45-22cc-4be1-8cdc-554f9af8a653\") " pod="openstack/cinder-scheduler-0" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.426778 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e725c45-22cc-4be1-8cdc-554f9af8a653-scripts\") pod \"cinder-scheduler-0\" (UID: \"9e725c45-22cc-4be1-8cdc-554f9af8a653\") " pod="openstack/cinder-scheduler-0" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.426906 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9e725c45-22cc-4be1-8cdc-554f9af8a653-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"9e725c45-22cc-4be1-8cdc-554f9af8a653\") " pod="openstack/cinder-scheduler-0" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.426991 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-95cxm\" (UniqueName: \"kubernetes.io/projected/9e725c45-22cc-4be1-8cdc-554f9af8a653-kube-api-access-95cxm\") pod \"cinder-scheduler-0\" (UID: \"9e725c45-22cc-4be1-8cdc-554f9af8a653\") " pod="openstack/cinder-scheduler-0" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.427103 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e725c45-22cc-4be1-8cdc-554f9af8a653-config-data\") pod \"cinder-scheduler-0\" (UID: \"9e725c45-22cc-4be1-8cdc-554f9af8a653\") " pod="openstack/cinder-scheduler-0" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.427280 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9e725c45-22cc-4be1-8cdc-554f9af8a653-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"9e725c45-22cc-4be1-8cdc-554f9af8a653\") 
" pod="openstack/cinder-scheduler-0" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.439941 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-9v8tt" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.440794 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9e725c45-22cc-4be1-8cdc-554f9af8a653-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"9e725c45-22cc-4be1-8cdc-554f9af8a653\") " pod="openstack/cinder-scheduler-0" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.482916 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-9v8tt"] Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.503746 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e725c45-22cc-4be1-8cdc-554f9af8a653-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"9e725c45-22cc-4be1-8cdc-554f9af8a653\") " pod="openstack/cinder-scheduler-0" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.504568 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e725c45-22cc-4be1-8cdc-554f9af8a653-scripts\") pod \"cinder-scheduler-0\" (UID: \"9e725c45-22cc-4be1-8cdc-554f9af8a653\") " pod="openstack/cinder-scheduler-0" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.505010 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9e725c45-22cc-4be1-8cdc-554f9af8a653-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"9e725c45-22cc-4be1-8cdc-554f9af8a653\") " pod="openstack/cinder-scheduler-0" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.505953 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e725c45-22cc-4be1-8cdc-554f9af8a653-config-data\") pod \"cinder-scheduler-0\" (UID: \"9e725c45-22cc-4be1-8cdc-554f9af8a653\") " pod="openstack/cinder-scheduler-0" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.509794 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.511625 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.513980 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.527020 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.537240 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-95cxm\" (UniqueName: \"kubernetes.io/projected/9e725c45-22cc-4be1-8cdc-554f9af8a653-kube-api-access-95cxm\") pod \"cinder-scheduler-0\" (UID: \"9e725c45-22cc-4be1-8cdc-554f9af8a653\") " pod="openstack/cinder-scheduler-0" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.543258 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/23230cc9-ccdd-4985-a6e0-6b9542da25bc-config-data-custom\") pod \"cinder-api-0\" (UID: \"23230cc9-ccdd-4985-a6e0-6b9542da25bc\") " pod="openstack/cinder-api-0" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.543294 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/100fee5d-5009-43cd-99c6-6a5dc15185e1-dns-svc\") pod \"dnsmasq-dns-6578955fd5-9v8tt\" (UID: \"100fee5d-5009-43cd-99c6-6a5dc15185e1\") " pod="openstack/dnsmasq-dns-6578955fd5-9v8tt" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.543320 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-97226\" (UniqueName: \"kubernetes.io/projected/23230cc9-ccdd-4985-a6e0-6b9542da25bc-kube-api-access-97226\") pod \"cinder-api-0\" (UID: \"23230cc9-ccdd-4985-a6e0-6b9542da25bc\") " pod="openstack/cinder-api-0" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.543359 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23230cc9-ccdd-4985-a6e0-6b9542da25bc-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"23230cc9-ccdd-4985-a6e0-6b9542da25bc\") " pod="openstack/cinder-api-0" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.543396 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tq45j\" (UniqueName: \"kubernetes.io/projected/100fee5d-5009-43cd-99c6-6a5dc15185e1-kube-api-access-tq45j\") pod \"dnsmasq-dns-6578955fd5-9v8tt\" (UID: \"100fee5d-5009-43cd-99c6-6a5dc15185e1\") " pod="openstack/dnsmasq-dns-6578955fd5-9v8tt" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.543423 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23230cc9-ccdd-4985-a6e0-6b9542da25bc-scripts\") pod \"cinder-api-0\" (UID: \"23230cc9-ccdd-4985-a6e0-6b9542da25bc\") " pod="openstack/cinder-api-0" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.543437 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/100fee5d-5009-43cd-99c6-6a5dc15185e1-config\") pod \"dnsmasq-dns-6578955fd5-9v8tt\" (UID: \"100fee5d-5009-43cd-99c6-6a5dc15185e1\") " pod="openstack/dnsmasq-dns-6578955fd5-9v8tt" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.543455 4576 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/100fee5d-5009-43cd-99c6-6a5dc15185e1-dns-swift-storage-0\") pod \"dnsmasq-dns-6578955fd5-9v8tt\" (UID: \"100fee5d-5009-43cd-99c6-6a5dc15185e1\") " pod="openstack/dnsmasq-dns-6578955fd5-9v8tt" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.543500 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/100fee5d-5009-43cd-99c6-6a5dc15185e1-ovsdbserver-nb\") pod \"dnsmasq-dns-6578955fd5-9v8tt\" (UID: \"100fee5d-5009-43cd-99c6-6a5dc15185e1\") " pod="openstack/dnsmasq-dns-6578955fd5-9v8tt" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.551891 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/23230cc9-ccdd-4985-a6e0-6b9542da25bc-etc-machine-id\") pod \"cinder-api-0\" (UID: \"23230cc9-ccdd-4985-a6e0-6b9542da25bc\") " pod="openstack/cinder-api-0" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.551978 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/100fee5d-5009-43cd-99c6-6a5dc15185e1-ovsdbserver-sb\") pod \"dnsmasq-dns-6578955fd5-9v8tt\" (UID: \"100fee5d-5009-43cd-99c6-6a5dc15185e1\") " pod="openstack/dnsmasq-dns-6578955fd5-9v8tt" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.552008 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23230cc9-ccdd-4985-a6e0-6b9542da25bc-config-data\") pod \"cinder-api-0\" (UID: \"23230cc9-ccdd-4985-a6e0-6b9542da25bc\") " pod="openstack/cinder-api-0" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.552065 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/23230cc9-ccdd-4985-a6e0-6b9542da25bc-logs\") pod \"cinder-api-0\" (UID: \"23230cc9-ccdd-4985-a6e0-6b9542da25bc\") " pod="openstack/cinder-api-0" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.673483 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.673633 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/23230cc9-ccdd-4985-a6e0-6b9542da25bc-config-data-custom\") pod \"cinder-api-0\" (UID: \"23230cc9-ccdd-4985-a6e0-6b9542da25bc\") " pod="openstack/cinder-api-0" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.673681 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/100fee5d-5009-43cd-99c6-6a5dc15185e1-dns-svc\") pod \"dnsmasq-dns-6578955fd5-9v8tt\" (UID: \"100fee5d-5009-43cd-99c6-6a5dc15185e1\") " pod="openstack/dnsmasq-dns-6578955fd5-9v8tt" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.673717 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-97226\" (UniqueName: \"kubernetes.io/projected/23230cc9-ccdd-4985-a6e0-6b9542da25bc-kube-api-access-97226\") pod \"cinder-api-0\" (UID: \"23230cc9-ccdd-4985-a6e0-6b9542da25bc\") " pod="openstack/cinder-api-0" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.673766 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23230cc9-ccdd-4985-a6e0-6b9542da25bc-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"23230cc9-ccdd-4985-a6e0-6b9542da25bc\") " pod="openstack/cinder-api-0" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.673816 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tq45j\" (UniqueName: \"kubernetes.io/projected/100fee5d-5009-43cd-99c6-6a5dc15185e1-kube-api-access-tq45j\") pod \"dnsmasq-dns-6578955fd5-9v8tt\" (UID: \"100fee5d-5009-43cd-99c6-6a5dc15185e1\") " pod="openstack/dnsmasq-dns-6578955fd5-9v8tt" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.673856 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23230cc9-ccdd-4985-a6e0-6b9542da25bc-scripts\") pod \"cinder-api-0\" (UID: \"23230cc9-ccdd-4985-a6e0-6b9542da25bc\") " pod="openstack/cinder-api-0" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.673877 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/100fee5d-5009-43cd-99c6-6a5dc15185e1-config\") pod \"dnsmasq-dns-6578955fd5-9v8tt\" (UID: \"100fee5d-5009-43cd-99c6-6a5dc15185e1\") " pod="openstack/dnsmasq-dns-6578955fd5-9v8tt" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.673903 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/100fee5d-5009-43cd-99c6-6a5dc15185e1-dns-swift-storage-0\") pod \"dnsmasq-dns-6578955fd5-9v8tt\" (UID: \"100fee5d-5009-43cd-99c6-6a5dc15185e1\") " pod="openstack/dnsmasq-dns-6578955fd5-9v8tt" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.673953 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/100fee5d-5009-43cd-99c6-6a5dc15185e1-ovsdbserver-nb\") pod \"dnsmasq-dns-6578955fd5-9v8tt\" (UID: \"100fee5d-5009-43cd-99c6-6a5dc15185e1\") " pod="openstack/dnsmasq-dns-6578955fd5-9v8tt" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.673997 4576 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/23230cc9-ccdd-4985-a6e0-6b9542da25bc-etc-machine-id\") pod \"cinder-api-0\" (UID: \"23230cc9-ccdd-4985-a6e0-6b9542da25bc\") " pod="openstack/cinder-api-0" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.674028 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/100fee5d-5009-43cd-99c6-6a5dc15185e1-ovsdbserver-sb\") pod \"dnsmasq-dns-6578955fd5-9v8tt\" (UID: \"100fee5d-5009-43cd-99c6-6a5dc15185e1\") " pod="openstack/dnsmasq-dns-6578955fd5-9v8tt" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.674051 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23230cc9-ccdd-4985-a6e0-6b9542da25bc-config-data\") pod \"cinder-api-0\" (UID: \"23230cc9-ccdd-4985-a6e0-6b9542da25bc\") " pod="openstack/cinder-api-0" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.674076 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/23230cc9-ccdd-4985-a6e0-6b9542da25bc-logs\") pod \"cinder-api-0\" (UID: \"23230cc9-ccdd-4985-a6e0-6b9542da25bc\") " pod="openstack/cinder-api-0" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.675501 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/23230cc9-ccdd-4985-a6e0-6b9542da25bc-logs\") pod \"cinder-api-0\" (UID: \"23230cc9-ccdd-4985-a6e0-6b9542da25bc\") " pod="openstack/cinder-api-0" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.675583 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/23230cc9-ccdd-4985-a6e0-6b9542da25bc-etc-machine-id\") pod \"cinder-api-0\" (UID: \"23230cc9-ccdd-4985-a6e0-6b9542da25bc\") " pod="openstack/cinder-api-0" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.675872 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/100fee5d-5009-43cd-99c6-6a5dc15185e1-ovsdbserver-nb\") pod \"dnsmasq-dns-6578955fd5-9v8tt\" (UID: \"100fee5d-5009-43cd-99c6-6a5dc15185e1\") " pod="openstack/dnsmasq-dns-6578955fd5-9v8tt" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.676377 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/100fee5d-5009-43cd-99c6-6a5dc15185e1-ovsdbserver-sb\") pod \"dnsmasq-dns-6578955fd5-9v8tt\" (UID: \"100fee5d-5009-43cd-99c6-6a5dc15185e1\") " pod="openstack/dnsmasq-dns-6578955fd5-9v8tt" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.676537 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/100fee5d-5009-43cd-99c6-6a5dc15185e1-config\") pod \"dnsmasq-dns-6578955fd5-9v8tt\" (UID: \"100fee5d-5009-43cd-99c6-6a5dc15185e1\") " pod="openstack/dnsmasq-dns-6578955fd5-9v8tt" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.677161 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/100fee5d-5009-43cd-99c6-6a5dc15185e1-dns-swift-storage-0\") pod \"dnsmasq-dns-6578955fd5-9v8tt\" (UID: \"100fee5d-5009-43cd-99c6-6a5dc15185e1\") " pod="openstack/dnsmasq-dns-6578955fd5-9v8tt" Dec 03 09:13:02 crc 
kubenswrapper[4576]: I1203 09:13:02.684547 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/23230cc9-ccdd-4985-a6e0-6b9542da25bc-config-data-custom\") pod \"cinder-api-0\" (UID: \"23230cc9-ccdd-4985-a6e0-6b9542da25bc\") " pod="openstack/cinder-api-0" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.685196 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/100fee5d-5009-43cd-99c6-6a5dc15185e1-dns-svc\") pod \"dnsmasq-dns-6578955fd5-9v8tt\" (UID: \"100fee5d-5009-43cd-99c6-6a5dc15185e1\") " pod="openstack/dnsmasq-dns-6578955fd5-9v8tt" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.685632 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23230cc9-ccdd-4985-a6e0-6b9542da25bc-scripts\") pod \"cinder-api-0\" (UID: \"23230cc9-ccdd-4985-a6e0-6b9542da25bc\") " pod="openstack/cinder-api-0" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.687916 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23230cc9-ccdd-4985-a6e0-6b9542da25bc-config-data\") pod \"cinder-api-0\" (UID: \"23230cc9-ccdd-4985-a6e0-6b9542da25bc\") " pod="openstack/cinder-api-0" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.691176 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23230cc9-ccdd-4985-a6e0-6b9542da25bc-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"23230cc9-ccdd-4985-a6e0-6b9542da25bc\") " pod="openstack/cinder-api-0" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.764458 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-75c4c4f78-4f2pp"] Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.866212 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6f978d8b99-7d88p"] Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.943333 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-97226\" (UniqueName: \"kubernetes.io/projected/23230cc9-ccdd-4985-a6e0-6b9542da25bc-kube-api-access-97226\") pod \"cinder-api-0\" (UID: \"23230cc9-ccdd-4985-a6e0-6b9542da25bc\") " pod="openstack/cinder-api-0" Dec 03 09:13:02 crc kubenswrapper[4576]: I1203 09:13:02.952438 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tq45j\" (UniqueName: \"kubernetes.io/projected/100fee5d-5009-43cd-99c6-6a5dc15185e1-kube-api-access-tq45j\") pod \"dnsmasq-dns-6578955fd5-9v8tt\" (UID: \"100fee5d-5009-43cd-99c6-6a5dc15185e1\") " pod="openstack/dnsmasq-dns-6578955fd5-9v8tt" Dec 03 09:13:03 crc kubenswrapper[4576]: I1203 09:13:03.005545 4576 generic.go:334] "Generic (PLEG): container finished" podID="b5fe3155-d737-4a7e-9596-a6080cae4b27" containerID="ae0a04d232d12bf4257ec0e2410deacdb7cc54eae8b0d752239f5061e2baf8b8" exitCode=0 Dec 03 09:13:03 crc kubenswrapper[4576]: I1203 09:13:03.005575 4576 generic.go:334] "Generic (PLEG): container finished" podID="b5fe3155-d737-4a7e-9596-a6080cae4b27" containerID="1902fc43fcd456aa6d39a99c52b62c6e954722493df5faeacabc5d4d0f9e6e1b" exitCode=2 Dec 03 09:13:03 crc kubenswrapper[4576]: I1203 09:13:03.005643 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"b5fe3155-d737-4a7e-9596-a6080cae4b27","Type":"ContainerDied","Data":"ae0a04d232d12bf4257ec0e2410deacdb7cc54eae8b0d752239f5061e2baf8b8"} Dec 03 09:13:03 crc kubenswrapper[4576]: I1203 09:13:03.005669 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b5fe3155-d737-4a7e-9596-a6080cae4b27","Type":"ContainerDied","Data":"1902fc43fcd456aa6d39a99c52b62c6e954722493df5faeacabc5d4d0f9e6e1b"} Dec 03 09:13:03 crc kubenswrapper[4576]: I1203 09:13:03.011625 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-9f4d5dcf8-5lvz8" event={"ID":"14ee4879-18ef-4d7f-956f-03297ff160fe","Type":"ContainerStarted","Data":"008bc92256fd3a2b71e0e1d428622c3980e65ac2667d37e7786fbbee3b8f257b"} Dec 03 09:13:03 crc kubenswrapper[4576]: I1203 09:13:03.022284 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-848cf88cfc-nv5kc" event={"ID":"887afe2d-492c-43ac-a169-fc24f09da194","Type":"ContainerStarted","Data":"b5a35f3dd3cf7795118de80d4df0608a2e32691044d8a15a37413918994aa649"} Dec 03 09:13:03 crc kubenswrapper[4576]: I1203 09:13:03.405481 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-9v8tt" Dec 03 09:13:03 crc kubenswrapper[4576]: I1203 09:13:03.699978 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 03 09:13:04 crc kubenswrapper[4576]: I1203 09:13:04.031175 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 09:13:04 crc kubenswrapper[4576]: I1203 09:13:04.073882 4576 generic.go:334] "Generic (PLEG): container finished" podID="2daadf8d-95a3-4485-9f3e-105c6600c366" containerID="d17d8348866bc3a5b6809a5c9134574fd640f8b550a9b1a52b038ddffd187c1d" exitCode=0 Dec 03 09:13:04 crc kubenswrapper[4576]: I1203 09:13:04.074476 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7b667979-knzgm" event={"ID":"2daadf8d-95a3-4485-9f3e-105c6600c366","Type":"ContainerDied","Data":"d17d8348866bc3a5b6809a5c9134574fd640f8b550a9b1a52b038ddffd187c1d"} Dec 03 09:13:04 crc kubenswrapper[4576]: I1203 09:13:04.095489 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-75c4c4f78-4f2pp" event={"ID":"114a8b03-2981-44ce-84e0-6ee5d96ab0b6","Type":"ContainerStarted","Data":"ab194e0e46d9600808690c66a177b18f55fbeb7b27b84f9270841143d65c4098"} Dec 03 09:13:04 crc kubenswrapper[4576]: I1203 09:13:04.144100 4576 generic.go:334] "Generic (PLEG): container finished" podID="b5fe3155-d737-4a7e-9596-a6080cae4b27" containerID="e664fb6cd75f081387fcb211ce9d76ee8303303cfcff773bb2e57ddfe65e3642" exitCode=0 Dec 03 09:13:04 crc kubenswrapper[4576]: I1203 09:13:04.144199 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b5fe3155-d737-4a7e-9596-a6080cae4b27","Type":"ContainerDied","Data":"e664fb6cd75f081387fcb211ce9d76ee8303303cfcff773bb2e57ddfe65e3642"} Dec 03 09:13:04 crc kubenswrapper[4576]: I1203 09:13:04.154915 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-667d896bdd-mtmgs" event={"ID":"2e5e255c-5481-4fc1-937f-53014b9b1da3","Type":"ContainerStarted","Data":"301edefa9cd0abfc629e1af236fa272aad4c402e366458b862a267cf61069921"} Dec 03 09:13:04 crc kubenswrapper[4576]: I1203 09:13:04.156821 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6f978d8b99-7d88p" 
event={"ID":"cdfca795-30e9-4534-9084-e34e01ab71ae","Type":"ContainerStarted","Data":"45898280d6b7dd61746803195c363cb04fbbf79c5a4f12b9fdada1208bea6130"} Dec 03 09:13:04 crc kubenswrapper[4576]: I1203 09:13:04.357477 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-9v8tt"] Dec 03 09:13:04 crc kubenswrapper[4576]: I1203 09:13:04.783821 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 03 09:13:04 crc kubenswrapper[4576]: I1203 09:13:04.809393 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b7b667979-knzgm" Dec 03 09:13:04 crc kubenswrapper[4576]: I1203 09:13:04.863155 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2daadf8d-95a3-4485-9f3e-105c6600c366-dns-swift-storage-0\") pod \"2daadf8d-95a3-4485-9f3e-105c6600c366\" (UID: \"2daadf8d-95a3-4485-9f3e-105c6600c366\") " Dec 03 09:13:04 crc kubenswrapper[4576]: I1203 09:13:04.863190 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkcj2\" (UniqueName: \"kubernetes.io/projected/2daadf8d-95a3-4485-9f3e-105c6600c366-kube-api-access-zkcj2\") pod \"2daadf8d-95a3-4485-9f3e-105c6600c366\" (UID: \"2daadf8d-95a3-4485-9f3e-105c6600c366\") " Dec 03 09:13:04 crc kubenswrapper[4576]: I1203 09:13:04.863250 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2daadf8d-95a3-4485-9f3e-105c6600c366-ovsdbserver-nb\") pod \"2daadf8d-95a3-4485-9f3e-105c6600c366\" (UID: \"2daadf8d-95a3-4485-9f3e-105c6600c366\") " Dec 03 09:13:04 crc kubenswrapper[4576]: I1203 09:13:04.863389 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2daadf8d-95a3-4485-9f3e-105c6600c366-config\") pod \"2daadf8d-95a3-4485-9f3e-105c6600c366\" (UID: \"2daadf8d-95a3-4485-9f3e-105c6600c366\") " Dec 03 09:13:04 crc kubenswrapper[4576]: I1203 09:13:04.863409 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2daadf8d-95a3-4485-9f3e-105c6600c366-dns-svc\") pod \"2daadf8d-95a3-4485-9f3e-105c6600c366\" (UID: \"2daadf8d-95a3-4485-9f3e-105c6600c366\") " Dec 03 09:13:04 crc kubenswrapper[4576]: I1203 09:13:04.863455 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2daadf8d-95a3-4485-9f3e-105c6600c366-ovsdbserver-sb\") pod \"2daadf8d-95a3-4485-9f3e-105c6600c366\" (UID: \"2daadf8d-95a3-4485-9f3e-105c6600c366\") " Dec 03 09:13:04 crc kubenswrapper[4576]: I1203 09:13:04.905476 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2daadf8d-95a3-4485-9f3e-105c6600c366-kube-api-access-zkcj2" (OuterVolumeSpecName: "kube-api-access-zkcj2") pod "2daadf8d-95a3-4485-9f3e-105c6600c366" (UID: "2daadf8d-95a3-4485-9f3e-105c6600c366"). InnerVolumeSpecName "kube-api-access-zkcj2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:13:04 crc kubenswrapper[4576]: I1203 09:13:04.965795 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkcj2\" (UniqueName: \"kubernetes.io/projected/2daadf8d-95a3-4485-9f3e-105c6600c366-kube-api-access-zkcj2\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:05 crc kubenswrapper[4576]: I1203 09:13:05.020291 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2daadf8d-95a3-4485-9f3e-105c6600c366-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2daadf8d-95a3-4485-9f3e-105c6600c366" (UID: "2daadf8d-95a3-4485-9f3e-105c6600c366"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:13:05 crc kubenswrapper[4576]: I1203 09:13:05.067661 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2daadf8d-95a3-4485-9f3e-105c6600c366-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2daadf8d-95a3-4485-9f3e-105c6600c366" (UID: "2daadf8d-95a3-4485-9f3e-105c6600c366"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:13:05 crc kubenswrapper[4576]: I1203 09:13:05.075588 4576 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2daadf8d-95a3-4485-9f3e-105c6600c366-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:05 crc kubenswrapper[4576]: I1203 09:13:05.075620 4576 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2daadf8d-95a3-4485-9f3e-105c6600c366-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:05 crc kubenswrapper[4576]: I1203 09:13:05.116830 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2daadf8d-95a3-4485-9f3e-105c6600c366-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2daadf8d-95a3-4485-9f3e-105c6600c366" (UID: "2daadf8d-95a3-4485-9f3e-105c6600c366"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:13:05 crc kubenswrapper[4576]: I1203 09:13:05.121138 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2daadf8d-95a3-4485-9f3e-105c6600c366-config" (OuterVolumeSpecName: "config") pod "2daadf8d-95a3-4485-9f3e-105c6600c366" (UID: "2daadf8d-95a3-4485-9f3e-105c6600c366"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:13:05 crc kubenswrapper[4576]: I1203 09:13:05.142463 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2daadf8d-95a3-4485-9f3e-105c6600c366-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "2daadf8d-95a3-4485-9f3e-105c6600c366" (UID: "2daadf8d-95a3-4485-9f3e-105c6600c366"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:13:05 crc kubenswrapper[4576]: I1203 09:13:05.178652 4576 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2daadf8d-95a3-4485-9f3e-105c6600c366-config\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:05 crc kubenswrapper[4576]: I1203 09:13:05.178683 4576 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2daadf8d-95a3-4485-9f3e-105c6600c366-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:05 crc kubenswrapper[4576]: I1203 09:13:05.178696 4576 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2daadf8d-95a3-4485-9f3e-105c6600c366-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:05 crc kubenswrapper[4576]: I1203 09:13:05.229786 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"23230cc9-ccdd-4985-a6e0-6b9542da25bc","Type":"ContainerStarted","Data":"2f60b6010678375d873be5a1c40d3cbbbe4d14608aa8cedf0162791cd72f8686"} Dec 03 09:13:05 crc kubenswrapper[4576]: I1203 09:13:05.325287 4576 generic.go:334] "Generic (PLEG): container finished" podID="887afe2d-492c-43ac-a169-fc24f09da194" containerID="ee240d10ab38b3119083dc21ebf5991235b7db2cd91eb291ab80a9a4ff63286b" exitCode=0 Dec 03 09:13:05 crc kubenswrapper[4576]: I1203 09:13:05.325370 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-848cf88cfc-nv5kc" event={"ID":"887afe2d-492c-43ac-a169-fc24f09da194","Type":"ContainerDied","Data":"ee240d10ab38b3119083dc21ebf5991235b7db2cd91eb291ab80a9a4ff63286b"} Dec 03 09:13:05 crc kubenswrapper[4576]: I1203 09:13:05.375387 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-75c4c4f78-4f2pp" event={"ID":"114a8b03-2981-44ce-84e0-6ee5d96ab0b6","Type":"ContainerStarted","Data":"b026351891cac9c52ec724c71a84d718a7ad227787998ad0f35a6410a97dc695"} Dec 03 09:13:05 crc kubenswrapper[4576]: I1203 09:13:05.399357 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-667d896bdd-mtmgs" event={"ID":"2e5e255c-5481-4fc1-937f-53014b9b1da3","Type":"ContainerStarted","Data":"ba163b07550f5a6b7db8c44eaf679328dae2ff9951b52f5efefc6dc4d42cd683"} Dec 03 09:13:05 crc kubenswrapper[4576]: I1203 09:13:05.400576 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-667d896bdd-mtmgs" Dec 03 09:13:05 crc kubenswrapper[4576]: I1203 09:13:05.453832 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6f978d8b99-7d88p" event={"ID":"cdfca795-30e9-4534-9084-e34e01ab71ae","Type":"ContainerStarted","Data":"e2c221a31a44e68844aee8870c6f414e7034da8f5290f94ea1583063664b4e67"} Dec 03 09:13:05 crc kubenswrapper[4576]: I1203 09:13:05.456375 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9e725c45-22cc-4be1-8cdc-554f9af8a653","Type":"ContainerStarted","Data":"a10bc41517e14fdbd619b4965f9bb13494f87f18acd29536fbf0ab6670e9eb7b"} Dec 03 09:13:05 crc kubenswrapper[4576]: I1203 09:13:05.464765 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-9v8tt" event={"ID":"100fee5d-5009-43cd-99c6-6a5dc15185e1","Type":"ContainerStarted","Data":"cb23249c9e1b86979e0198a0eb75cf15f83233a58d110a1e783f04b4f6bea554"} Dec 03 09:13:05 crc kubenswrapper[4576]: I1203 09:13:05.464803 4576 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/dnsmasq-dns-6578955fd5-9v8tt" event={"ID":"100fee5d-5009-43cd-99c6-6a5dc15185e1","Type":"ContainerStarted","Data":"57bc70804c6fa94d67b9475c0a19fc6c749c3c087089930792dfbec6321febc2"} Dec 03 09:13:05 crc kubenswrapper[4576]: I1203 09:13:05.493031 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7b667979-knzgm" event={"ID":"2daadf8d-95a3-4485-9f3e-105c6600c366","Type":"ContainerDied","Data":"22b6c5e392e4c2ad379428bf234c261939fd1f825b013e2b3be7291c7378b61c"} Dec 03 09:13:05 crc kubenswrapper[4576]: I1203 09:13:05.493084 4576 scope.go:117] "RemoveContainer" containerID="d17d8348866bc3a5b6809a5c9134574fd640f8b550a9b1a52b038ddffd187c1d" Dec 03 09:13:05 crc kubenswrapper[4576]: I1203 09:13:05.493214 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b7b667979-knzgm" Dec 03 09:13:05 crc kubenswrapper[4576]: I1203 09:13:05.521275 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-667d896bdd-mtmgs" podStartSLOduration=14.521255082 podStartE2EDuration="14.521255082s" podCreationTimestamp="2025-12-03 09:12:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:13:05.49187813 +0000 UTC m=+1992.877855114" watchObservedRunningTime="2025-12-03 09:13:05.521255082 +0000 UTC m=+1992.907232066" Dec 03 09:13:06 crc kubenswrapper[4576]: I1203 09:13:06.157628 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-knzgm"] Dec 03 09:13:06 crc kubenswrapper[4576]: I1203 09:13:06.191813 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-knzgm"] Dec 03 09:13:06 crc kubenswrapper[4576]: I1203 09:13:06.473870 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 03 09:13:06 crc kubenswrapper[4576]: I1203 09:13:06.549543 4576 generic.go:334] "Generic (PLEG): container finished" podID="100fee5d-5009-43cd-99c6-6a5dc15185e1" containerID="cb23249c9e1b86979e0198a0eb75cf15f83233a58d110a1e783f04b4f6bea554" exitCode=0 Dec 03 09:13:06 crc kubenswrapper[4576]: I1203 09:13:06.550379 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-9v8tt" event={"ID":"100fee5d-5009-43cd-99c6-6a5dc15185e1","Type":"ContainerDied","Data":"cb23249c9e1b86979e0198a0eb75cf15f83233a58d110a1e783f04b4f6bea554"} Dec 03 09:13:06 crc kubenswrapper[4576]: E1203 09:13:06.628456 4576 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2daadf8d_95a3_4485_9f3e_105c6600c366.slice\": RecentStats: unable to find data in memory cache]" Dec 03 09:13:06 crc kubenswrapper[4576]: I1203 09:13:06.671707 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-848cf88cfc-nv5kc" Dec 03 09:13:06 crc kubenswrapper[4576]: I1203 09:13:06.782146 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zvqbf\" (UniqueName: \"kubernetes.io/projected/887afe2d-492c-43ac-a169-fc24f09da194-kube-api-access-zvqbf\") pod \"887afe2d-492c-43ac-a169-fc24f09da194\" (UID: \"887afe2d-492c-43ac-a169-fc24f09da194\") " Dec 03 09:13:06 crc kubenswrapper[4576]: I1203 09:13:06.782243 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/887afe2d-492c-43ac-a169-fc24f09da194-dns-svc\") pod \"887afe2d-492c-43ac-a169-fc24f09da194\" (UID: \"887afe2d-492c-43ac-a169-fc24f09da194\") " Dec 03 09:13:06 crc kubenswrapper[4576]: I1203 09:13:06.782274 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/887afe2d-492c-43ac-a169-fc24f09da194-config\") pod \"887afe2d-492c-43ac-a169-fc24f09da194\" (UID: \"887afe2d-492c-43ac-a169-fc24f09da194\") " Dec 03 09:13:06 crc kubenswrapper[4576]: I1203 09:13:06.782362 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/887afe2d-492c-43ac-a169-fc24f09da194-dns-swift-storage-0\") pod \"887afe2d-492c-43ac-a169-fc24f09da194\" (UID: \"887afe2d-492c-43ac-a169-fc24f09da194\") " Dec 03 09:13:06 crc kubenswrapper[4576]: I1203 09:13:06.782386 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/887afe2d-492c-43ac-a169-fc24f09da194-ovsdbserver-sb\") pod \"887afe2d-492c-43ac-a169-fc24f09da194\" (UID: \"887afe2d-492c-43ac-a169-fc24f09da194\") " Dec 03 09:13:06 crc kubenswrapper[4576]: I1203 09:13:06.782430 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/887afe2d-492c-43ac-a169-fc24f09da194-ovsdbserver-nb\") pod \"887afe2d-492c-43ac-a169-fc24f09da194\" (UID: \"887afe2d-492c-43ac-a169-fc24f09da194\") " Dec 03 09:13:06 crc kubenswrapper[4576]: I1203 09:13:06.801938 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/887afe2d-492c-43ac-a169-fc24f09da194-kube-api-access-zvqbf" (OuterVolumeSpecName: "kube-api-access-zvqbf") pod "887afe2d-492c-43ac-a169-fc24f09da194" (UID: "887afe2d-492c-43ac-a169-fc24f09da194"). InnerVolumeSpecName "kube-api-access-zvqbf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:13:06 crc kubenswrapper[4576]: I1203 09:13:06.828812 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/887afe2d-492c-43ac-a169-fc24f09da194-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "887afe2d-492c-43ac-a169-fc24f09da194" (UID: "887afe2d-492c-43ac-a169-fc24f09da194"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:13:06 crc kubenswrapper[4576]: I1203 09:13:06.860899 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/887afe2d-492c-43ac-a169-fc24f09da194-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "887afe2d-492c-43ac-a169-fc24f09da194" (UID: "887afe2d-492c-43ac-a169-fc24f09da194"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:13:06 crc kubenswrapper[4576]: I1203 09:13:06.886962 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/887afe2d-492c-43ac-a169-fc24f09da194-config" (OuterVolumeSpecName: "config") pod "887afe2d-492c-43ac-a169-fc24f09da194" (UID: "887afe2d-492c-43ac-a169-fc24f09da194"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:13:06 crc kubenswrapper[4576]: I1203 09:13:06.889126 4576 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/887afe2d-492c-43ac-a169-fc24f09da194-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:06 crc kubenswrapper[4576]: I1203 09:13:06.889148 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zvqbf\" (UniqueName: \"kubernetes.io/projected/887afe2d-492c-43ac-a169-fc24f09da194-kube-api-access-zvqbf\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:06 crc kubenswrapper[4576]: I1203 09:13:06.889159 4576 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/887afe2d-492c-43ac-a169-fc24f09da194-config\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:06 crc kubenswrapper[4576]: I1203 09:13:06.889167 4576 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/887afe2d-492c-43ac-a169-fc24f09da194-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:06 crc kubenswrapper[4576]: I1203 09:13:06.939613 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/887afe2d-492c-43ac-a169-fc24f09da194-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "887afe2d-492c-43ac-a169-fc24f09da194" (UID: "887afe2d-492c-43ac-a169-fc24f09da194"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:13:06 crc kubenswrapper[4576]: I1203 09:13:06.943969 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/887afe2d-492c-43ac-a169-fc24f09da194-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "887afe2d-492c-43ac-a169-fc24f09da194" (UID: "887afe2d-492c-43ac-a169-fc24f09da194"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:13:06 crc kubenswrapper[4576]: I1203 09:13:06.992947 4576 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/887afe2d-492c-43ac-a169-fc24f09da194-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:06 crc kubenswrapper[4576]: I1203 09:13:06.992975 4576 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/887afe2d-492c-43ac-a169-fc24f09da194-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:07 crc kubenswrapper[4576]: I1203 09:13:07.567400 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6f978d8b99-7d88p" event={"ID":"cdfca795-30e9-4534-9084-e34e01ab71ae","Type":"ContainerStarted","Data":"87b83a94e256333c969b7089a19fc8b9395eb6c865adddb572f67f06ffc6dad8"} Dec 03 09:13:07 crc kubenswrapper[4576]: I1203 09:13:07.568596 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-6f978d8b99-7d88p" Dec 03 09:13:07 crc kubenswrapper[4576]: I1203 09:13:07.571326 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9e725c45-22cc-4be1-8cdc-554f9af8a653","Type":"ContainerStarted","Data":"0832e015f2eadb9ceae7e56734f7e1c8011a7b01fed9eac59a662f40962a14cb"} Dec 03 09:13:07 crc kubenswrapper[4576]: I1203 09:13:07.576353 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-9v8tt" event={"ID":"100fee5d-5009-43cd-99c6-6a5dc15185e1","Type":"ContainerStarted","Data":"8dad5412efe5ec3057df4e8656c7cff2451a8506dc9af8e486c3c802f77ff1fd"} Dec 03 09:13:07 crc kubenswrapper[4576]: I1203 09:13:07.577314 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6578955fd5-9v8tt" Dec 03 09:13:07 crc kubenswrapper[4576]: I1203 09:13:07.578810 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-848cf88cfc-nv5kc" event={"ID":"887afe2d-492c-43ac-a169-fc24f09da194","Type":"ContainerDied","Data":"b5a35f3dd3cf7795118de80d4df0608a2e32691044d8a15a37413918994aa649"} Dec 03 09:13:07 crc kubenswrapper[4576]: I1203 09:13:07.578835 4576 scope.go:117] "RemoveContainer" containerID="ee240d10ab38b3119083dc21ebf5991235b7db2cd91eb291ab80a9a4ff63286b" Dec 03 09:13:07 crc kubenswrapper[4576]: I1203 09:13:07.578927 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-848cf88cfc-nv5kc" Dec 03 09:13:07 crc kubenswrapper[4576]: I1203 09:13:07.594388 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-6f978d8b99-7d88p" podStartSLOduration=13.594368592 podStartE2EDuration="13.594368592s" podCreationTimestamp="2025-12-03 09:12:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:13:07.58626125 +0000 UTC m=+1994.972238234" watchObservedRunningTime="2025-12-03 09:13:07.594368592 +0000 UTC m=+1994.980345576" Dec 03 09:13:07 crc kubenswrapper[4576]: I1203 09:13:07.600816 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"23230cc9-ccdd-4985-a6e0-6b9542da25bc","Type":"ContainerStarted","Data":"96bd187c3393bb473bb0fa298313527f161e11b189b563b98adce3367c233ebf"} Dec 03 09:13:07 crc kubenswrapper[4576]: I1203 09:13:07.611737 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-75c4c4f78-4f2pp" event={"ID":"114a8b03-2981-44ce-84e0-6ee5d96ab0b6","Type":"ContainerStarted","Data":"6ef82bfb2166922493bd35625c7139aad0ca292c473c4fbe824f592069923330"} Dec 03 09:13:07 crc kubenswrapper[4576]: I1203 09:13:07.612512 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-75c4c4f78-4f2pp" Dec 03 09:13:07 crc kubenswrapper[4576]: I1203 09:13:07.612556 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-75c4c4f78-4f2pp" Dec 03 09:13:07 crc kubenswrapper[4576]: I1203 09:13:07.620752 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6578955fd5-9v8tt" podStartSLOduration=5.620735622 podStartE2EDuration="5.620735622s" podCreationTimestamp="2025-12-03 09:13:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:13:07.61997501 +0000 UTC m=+1995.005951994" watchObservedRunningTime="2025-12-03 09:13:07.620735622 +0000 UTC m=+1995.006712596" Dec 03 09:13:07 crc kubenswrapper[4576]: I1203 09:13:07.626310 4576 generic.go:334] "Generic (PLEG): container finished" podID="b5fe3155-d737-4a7e-9596-a6080cae4b27" containerID="32b57303e9fb81ccf028911c977c5217cead677e6f4bbdef90a0052653a5853f" exitCode=0 Dec 03 09:13:07 crc kubenswrapper[4576]: I1203 09:13:07.626452 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b5fe3155-d737-4a7e-9596-a6080cae4b27","Type":"ContainerDied","Data":"32b57303e9fb81ccf028911c977c5217cead677e6f4bbdef90a0052653a5853f"} Dec 03 09:13:07 crc kubenswrapper[4576]: I1203 09:13:07.666590 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-nv5kc"] Dec 03 09:13:07 crc kubenswrapper[4576]: I1203 09:13:07.674870 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-nv5kc"] Dec 03 09:13:07 crc kubenswrapper[4576]: I1203 09:13:07.682954 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-75c4c4f78-4f2pp" podStartSLOduration=7.682938579 podStartE2EDuration="7.682938579s" podCreationTimestamp="2025-12-03 09:13:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:13:07.681287744 +0000 UTC m=+1995.067264738" 
watchObservedRunningTime="2025-12-03 09:13:07.682938579 +0000 UTC m=+1995.068915563" Dec 03 09:13:07 crc kubenswrapper[4576]: I1203 09:13:07.706684 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2daadf8d-95a3-4485-9f3e-105c6600c366" path="/var/lib/kubelet/pods/2daadf8d-95a3-4485-9f3e-105c6600c366/volumes" Dec 03 09:13:07 crc kubenswrapper[4576]: I1203 09:13:07.707256 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="887afe2d-492c-43ac-a169-fc24f09da194" path="/var/lib/kubelet/pods/887afe2d-492c-43ac-a169-fc24f09da194/volumes" Dec 03 09:13:09 crc kubenswrapper[4576]: I1203 09:13:09.007570 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-679c878b58-l5t4t"] Dec 03 09:13:09 crc kubenswrapper[4576]: E1203 09:13:09.008346 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="887afe2d-492c-43ac-a169-fc24f09da194" containerName="init" Dec 03 09:13:09 crc kubenswrapper[4576]: I1203 09:13:09.008358 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="887afe2d-492c-43ac-a169-fc24f09da194" containerName="init" Dec 03 09:13:09 crc kubenswrapper[4576]: E1203 09:13:09.008382 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2daadf8d-95a3-4485-9f3e-105c6600c366" containerName="init" Dec 03 09:13:09 crc kubenswrapper[4576]: I1203 09:13:09.008388 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="2daadf8d-95a3-4485-9f3e-105c6600c366" containerName="init" Dec 03 09:13:09 crc kubenswrapper[4576]: I1203 09:13:09.008602 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="2daadf8d-95a3-4485-9f3e-105c6600c366" containerName="init" Dec 03 09:13:09 crc kubenswrapper[4576]: I1203 09:13:09.008624 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="887afe2d-492c-43ac-a169-fc24f09da194" containerName="init" Dec 03 09:13:09 crc kubenswrapper[4576]: I1203 09:13:09.009643 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-679c878b58-l5t4t" Dec 03 09:13:09 crc kubenswrapper[4576]: I1203 09:13:09.017917 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Dec 03 09:13:09 crc kubenswrapper[4576]: I1203 09:13:09.018086 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Dec 03 09:13:09 crc kubenswrapper[4576]: I1203 09:13:09.043286 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-679c878b58-l5t4t"] Dec 03 09:13:09 crc kubenswrapper[4576]: I1203 09:13:09.071206 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktnzw\" (UniqueName: \"kubernetes.io/projected/c3348234-cbbe-464e-b7dd-493151ce96ef-kube-api-access-ktnzw\") pod \"barbican-api-679c878b58-l5t4t\" (UID: \"c3348234-cbbe-464e-b7dd-493151ce96ef\") " pod="openstack/barbican-api-679c878b58-l5t4t" Dec 03 09:13:09 crc kubenswrapper[4576]: I1203 09:13:09.071280 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3348234-cbbe-464e-b7dd-493151ce96ef-combined-ca-bundle\") pod \"barbican-api-679c878b58-l5t4t\" (UID: \"c3348234-cbbe-464e-b7dd-493151ce96ef\") " pod="openstack/barbican-api-679c878b58-l5t4t" Dec 03 09:13:09 crc kubenswrapper[4576]: I1203 09:13:09.071317 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3348234-cbbe-464e-b7dd-493151ce96ef-internal-tls-certs\") pod \"barbican-api-679c878b58-l5t4t\" (UID: \"c3348234-cbbe-464e-b7dd-493151ce96ef\") " pod="openstack/barbican-api-679c878b58-l5t4t" Dec 03 09:13:09 crc kubenswrapper[4576]: I1203 09:13:09.071332 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3348234-cbbe-464e-b7dd-493151ce96ef-config-data\") pod \"barbican-api-679c878b58-l5t4t\" (UID: \"c3348234-cbbe-464e-b7dd-493151ce96ef\") " pod="openstack/barbican-api-679c878b58-l5t4t" Dec 03 09:13:09 crc kubenswrapper[4576]: I1203 09:13:09.071353 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c3348234-cbbe-464e-b7dd-493151ce96ef-logs\") pod \"barbican-api-679c878b58-l5t4t\" (UID: \"c3348234-cbbe-464e-b7dd-493151ce96ef\") " pod="openstack/barbican-api-679c878b58-l5t4t" Dec 03 09:13:09 crc kubenswrapper[4576]: I1203 09:13:09.071375 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3348234-cbbe-464e-b7dd-493151ce96ef-public-tls-certs\") pod \"barbican-api-679c878b58-l5t4t\" (UID: \"c3348234-cbbe-464e-b7dd-493151ce96ef\") " pod="openstack/barbican-api-679c878b58-l5t4t" Dec 03 09:13:09 crc kubenswrapper[4576]: I1203 09:13:09.071408 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c3348234-cbbe-464e-b7dd-493151ce96ef-config-data-custom\") pod \"barbican-api-679c878b58-l5t4t\" (UID: \"c3348234-cbbe-464e-b7dd-493151ce96ef\") " pod="openstack/barbican-api-679c878b58-l5t4t" Dec 03 09:13:09 crc kubenswrapper[4576]: I1203 09:13:09.172585 4576 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-ktnzw\" (UniqueName: \"kubernetes.io/projected/c3348234-cbbe-464e-b7dd-493151ce96ef-kube-api-access-ktnzw\") pod \"barbican-api-679c878b58-l5t4t\" (UID: \"c3348234-cbbe-464e-b7dd-493151ce96ef\") " pod="openstack/barbican-api-679c878b58-l5t4t" Dec 03 09:13:09 crc kubenswrapper[4576]: I1203 09:13:09.172925 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3348234-cbbe-464e-b7dd-493151ce96ef-combined-ca-bundle\") pod \"barbican-api-679c878b58-l5t4t\" (UID: \"c3348234-cbbe-464e-b7dd-493151ce96ef\") " pod="openstack/barbican-api-679c878b58-l5t4t" Dec 03 09:13:09 crc kubenswrapper[4576]: I1203 09:13:09.173035 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3348234-cbbe-464e-b7dd-493151ce96ef-internal-tls-certs\") pod \"barbican-api-679c878b58-l5t4t\" (UID: \"c3348234-cbbe-464e-b7dd-493151ce96ef\") " pod="openstack/barbican-api-679c878b58-l5t4t" Dec 03 09:13:09 crc kubenswrapper[4576]: I1203 09:13:09.173110 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3348234-cbbe-464e-b7dd-493151ce96ef-config-data\") pod \"barbican-api-679c878b58-l5t4t\" (UID: \"c3348234-cbbe-464e-b7dd-493151ce96ef\") " pod="openstack/barbican-api-679c878b58-l5t4t" Dec 03 09:13:09 crc kubenswrapper[4576]: I1203 09:13:09.173187 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c3348234-cbbe-464e-b7dd-493151ce96ef-logs\") pod \"barbican-api-679c878b58-l5t4t\" (UID: \"c3348234-cbbe-464e-b7dd-493151ce96ef\") " pod="openstack/barbican-api-679c878b58-l5t4t" Dec 03 09:13:09 crc kubenswrapper[4576]: I1203 09:13:09.173270 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3348234-cbbe-464e-b7dd-493151ce96ef-public-tls-certs\") pod \"barbican-api-679c878b58-l5t4t\" (UID: \"c3348234-cbbe-464e-b7dd-493151ce96ef\") " pod="openstack/barbican-api-679c878b58-l5t4t" Dec 03 09:13:09 crc kubenswrapper[4576]: I1203 09:13:09.173356 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c3348234-cbbe-464e-b7dd-493151ce96ef-config-data-custom\") pod \"barbican-api-679c878b58-l5t4t\" (UID: \"c3348234-cbbe-464e-b7dd-493151ce96ef\") " pod="openstack/barbican-api-679c878b58-l5t4t" Dec 03 09:13:09 crc kubenswrapper[4576]: I1203 09:13:09.174975 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c3348234-cbbe-464e-b7dd-493151ce96ef-logs\") pod \"barbican-api-679c878b58-l5t4t\" (UID: \"c3348234-cbbe-464e-b7dd-493151ce96ef\") " pod="openstack/barbican-api-679c878b58-l5t4t" Dec 03 09:13:09 crc kubenswrapper[4576]: I1203 09:13:09.181048 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c3348234-cbbe-464e-b7dd-493151ce96ef-config-data-custom\") pod \"barbican-api-679c878b58-l5t4t\" (UID: \"c3348234-cbbe-464e-b7dd-493151ce96ef\") " pod="openstack/barbican-api-679c878b58-l5t4t" Dec 03 09:13:09 crc kubenswrapper[4576]: I1203 09:13:09.182792 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/c3348234-cbbe-464e-b7dd-493151ce96ef-combined-ca-bundle\") pod \"barbican-api-679c878b58-l5t4t\" (UID: \"c3348234-cbbe-464e-b7dd-493151ce96ef\") " pod="openstack/barbican-api-679c878b58-l5t4t" Dec 03 09:13:09 crc kubenswrapper[4576]: I1203 09:13:09.190223 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3348234-cbbe-464e-b7dd-493151ce96ef-config-data\") pod \"barbican-api-679c878b58-l5t4t\" (UID: \"c3348234-cbbe-464e-b7dd-493151ce96ef\") " pod="openstack/barbican-api-679c878b58-l5t4t" Dec 03 09:13:09 crc kubenswrapper[4576]: I1203 09:13:09.192885 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3348234-cbbe-464e-b7dd-493151ce96ef-public-tls-certs\") pod \"barbican-api-679c878b58-l5t4t\" (UID: \"c3348234-cbbe-464e-b7dd-493151ce96ef\") " pod="openstack/barbican-api-679c878b58-l5t4t" Dec 03 09:13:09 crc kubenswrapper[4576]: I1203 09:13:09.196908 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3348234-cbbe-464e-b7dd-493151ce96ef-internal-tls-certs\") pod \"barbican-api-679c878b58-l5t4t\" (UID: \"c3348234-cbbe-464e-b7dd-493151ce96ef\") " pod="openstack/barbican-api-679c878b58-l5t4t" Dec 03 09:13:09 crc kubenswrapper[4576]: I1203 09:13:09.203012 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktnzw\" (UniqueName: \"kubernetes.io/projected/c3348234-cbbe-464e-b7dd-493151ce96ef-kube-api-access-ktnzw\") pod \"barbican-api-679c878b58-l5t4t\" (UID: \"c3348234-cbbe-464e-b7dd-493151ce96ef\") " pod="openstack/barbican-api-679c878b58-l5t4t" Dec 03 09:13:09 crc kubenswrapper[4576]: I1203 09:13:09.347372 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-679c878b58-l5t4t" Dec 03 09:13:09 crc kubenswrapper[4576]: I1203 09:13:09.963504 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 09:13:09 crc kubenswrapper[4576]: I1203 09:13:09.996291 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b5fe3155-d737-4a7e-9596-a6080cae4b27-scripts\") pod \"b5fe3155-d737-4a7e-9596-a6080cae4b27\" (UID: \"b5fe3155-d737-4a7e-9596-a6080cae4b27\") " Dec 03 09:13:09 crc kubenswrapper[4576]: I1203 09:13:09.996347 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5fe3155-d737-4a7e-9596-a6080cae4b27-config-data\") pod \"b5fe3155-d737-4a7e-9596-a6080cae4b27\" (UID: \"b5fe3155-d737-4a7e-9596-a6080cae4b27\") " Dec 03 09:13:09 crc kubenswrapper[4576]: I1203 09:13:09.996444 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b5fe3155-d737-4a7e-9596-a6080cae4b27-run-httpd\") pod \"b5fe3155-d737-4a7e-9596-a6080cae4b27\" (UID: \"b5fe3155-d737-4a7e-9596-a6080cae4b27\") " Dec 03 09:13:09 crc kubenswrapper[4576]: I1203 09:13:09.996498 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b5fe3155-d737-4a7e-9596-a6080cae4b27-sg-core-conf-yaml\") pod \"b5fe3155-d737-4a7e-9596-a6080cae4b27\" (UID: \"b5fe3155-d737-4a7e-9596-a6080cae4b27\") " Dec 03 09:13:09 crc kubenswrapper[4576]: I1203 09:13:09.996590 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b5fe3155-d737-4a7e-9596-a6080cae4b27-log-httpd\") pod \"b5fe3155-d737-4a7e-9596-a6080cae4b27\" (UID: \"b5fe3155-d737-4a7e-9596-a6080cae4b27\") " Dec 03 09:13:09 crc kubenswrapper[4576]: I1203 09:13:09.996624 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5fe3155-d737-4a7e-9596-a6080cae4b27-combined-ca-bundle\") pod \"b5fe3155-d737-4a7e-9596-a6080cae4b27\" (UID: \"b5fe3155-d737-4a7e-9596-a6080cae4b27\") " Dec 03 09:13:09 crc kubenswrapper[4576]: I1203 09:13:09.996699 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d2m6c\" (UniqueName: \"kubernetes.io/projected/b5fe3155-d737-4a7e-9596-a6080cae4b27-kube-api-access-d2m6c\") pod \"b5fe3155-d737-4a7e-9596-a6080cae4b27\" (UID: \"b5fe3155-d737-4a7e-9596-a6080cae4b27\") " Dec 03 09:13:10 crc kubenswrapper[4576]: I1203 09:13:09.998267 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5fe3155-d737-4a7e-9596-a6080cae4b27-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "b5fe3155-d737-4a7e-9596-a6080cae4b27" (UID: "b5fe3155-d737-4a7e-9596-a6080cae4b27"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:13:10 crc kubenswrapper[4576]: I1203 09:13:09.998365 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5fe3155-d737-4a7e-9596-a6080cae4b27-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "b5fe3155-d737-4a7e-9596-a6080cae4b27" (UID: "b5fe3155-d737-4a7e-9596-a6080cae4b27"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:13:10 crc kubenswrapper[4576]: I1203 09:13:10.003136 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5fe3155-d737-4a7e-9596-a6080cae4b27-scripts" (OuterVolumeSpecName: "scripts") pod "b5fe3155-d737-4a7e-9596-a6080cae4b27" (UID: "b5fe3155-d737-4a7e-9596-a6080cae4b27"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:13:10 crc kubenswrapper[4576]: I1203 09:13:10.024735 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5fe3155-d737-4a7e-9596-a6080cae4b27-kube-api-access-d2m6c" (OuterVolumeSpecName: "kube-api-access-d2m6c") pod "b5fe3155-d737-4a7e-9596-a6080cae4b27" (UID: "b5fe3155-d737-4a7e-9596-a6080cae4b27"). InnerVolumeSpecName "kube-api-access-d2m6c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:13:10 crc kubenswrapper[4576]: I1203 09:13:10.100853 4576 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b5fe3155-d737-4a7e-9596-a6080cae4b27-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:10 crc kubenswrapper[4576]: I1203 09:13:10.100891 4576 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b5fe3155-d737-4a7e-9596-a6080cae4b27-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:10 crc kubenswrapper[4576]: I1203 09:13:10.100900 4576 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b5fe3155-d737-4a7e-9596-a6080cae4b27-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:10 crc kubenswrapper[4576]: I1203 09:13:10.100909 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d2m6c\" (UniqueName: \"kubernetes.io/projected/b5fe3155-d737-4a7e-9596-a6080cae4b27-kube-api-access-d2m6c\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:10 crc kubenswrapper[4576]: I1203 09:13:10.149686 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5fe3155-d737-4a7e-9596-a6080cae4b27-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "b5fe3155-d737-4a7e-9596-a6080cae4b27" (UID: "b5fe3155-d737-4a7e-9596-a6080cae4b27"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:13:10 crc kubenswrapper[4576]: I1203 09:13:10.202365 4576 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b5fe3155-d737-4a7e-9596-a6080cae4b27-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:10 crc kubenswrapper[4576]: I1203 09:13:10.258703 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5fe3155-d737-4a7e-9596-a6080cae4b27-config-data" (OuterVolumeSpecName: "config-data") pod "b5fe3155-d737-4a7e-9596-a6080cae4b27" (UID: "b5fe3155-d737-4a7e-9596-a6080cae4b27"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:13:10 crc kubenswrapper[4576]: I1203 09:13:10.283411 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5fe3155-d737-4a7e-9596-a6080cae4b27-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b5fe3155-d737-4a7e-9596-a6080cae4b27" (UID: "b5fe3155-d737-4a7e-9596-a6080cae4b27"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:13:10 crc kubenswrapper[4576]: I1203 09:13:10.303593 4576 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5fe3155-d737-4a7e-9596-a6080cae4b27-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:10 crc kubenswrapper[4576]: I1203 09:13:10.303631 4576 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5fe3155-d737-4a7e-9596-a6080cae4b27-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:10 crc kubenswrapper[4576]: I1203 09:13:10.512304 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-679c878b58-l5t4t"] Dec 03 09:13:10 crc kubenswrapper[4576]: W1203 09:13:10.542219 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc3348234_cbbe_464e_b7dd_493151ce96ef.slice/crio-80085248b66b3b229d8e68b54ae74eb8015f79048f0faebae6fd91b2b0ce5b34 WatchSource:0}: Error finding container 80085248b66b3b229d8e68b54ae74eb8015f79048f0faebae6fd91b2b0ce5b34: Status 404 returned error can't find the container with id 80085248b66b3b229d8e68b54ae74eb8015f79048f0faebae6fd91b2b0ce5b34 Dec 03 09:13:10 crc kubenswrapper[4576]: I1203 09:13:10.729739 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7678fb6bf-m8m4k" event={"ID":"e8e42590-8fdb-4c05-a5e1-e2cddbeb0731","Type":"ContainerStarted","Data":"97c41f2536f714e6aff40789b2e391ee9f9d47287fa8ebde6dd5a7ea1924eee4"} Dec 03 09:13:10 crc kubenswrapper[4576]: I1203 09:13:10.737911 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-679c878b58-l5t4t" event={"ID":"c3348234-cbbe-464e-b7dd-493151ce96ef","Type":"ContainerStarted","Data":"80085248b66b3b229d8e68b54ae74eb8015f79048f0faebae6fd91b2b0ce5b34"} Dec 03 09:13:10 crc kubenswrapper[4576]: I1203 09:13:10.758564 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b5fe3155-d737-4a7e-9596-a6080cae4b27","Type":"ContainerDied","Data":"7b3e903901080426c7c3f12f79ce1f6eb6bcc782e089b6d983383afe055fb3d3"} Dec 03 09:13:10 crc kubenswrapper[4576]: I1203 09:13:10.758865 4576 scope.go:117] "RemoveContainer" containerID="ae0a04d232d12bf4257ec0e2410deacdb7cc54eae8b0d752239f5061e2baf8b8" Dec 03 09:13:10 crc kubenswrapper[4576]: I1203 09:13:10.758977 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 09:13:10 crc kubenswrapper[4576]: I1203 09:13:10.940599 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:13:10 crc kubenswrapper[4576]: I1203 09:13:10.945620 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:13:10 crc kubenswrapper[4576]: I1203 09:13:10.984794 4576 scope.go:117] "RemoveContainer" containerID="1902fc43fcd456aa6d39a99c52b62c6e954722493df5faeacabc5d4d0f9e6e1b" Dec 03 09:13:10 crc kubenswrapper[4576]: I1203 09:13:10.985713 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:13:10 crc kubenswrapper[4576]: E1203 09:13:10.986053 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5fe3155-d737-4a7e-9596-a6080cae4b27" containerName="proxy-httpd" Dec 03 09:13:10 crc kubenswrapper[4576]: I1203 09:13:10.986064 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5fe3155-d737-4a7e-9596-a6080cae4b27" containerName="proxy-httpd" Dec 03 09:13:10 crc kubenswrapper[4576]: E1203 09:13:10.986088 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5fe3155-d737-4a7e-9596-a6080cae4b27" containerName="sg-core" Dec 03 09:13:10 crc kubenswrapper[4576]: I1203 09:13:10.986095 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5fe3155-d737-4a7e-9596-a6080cae4b27" containerName="sg-core" Dec 03 09:13:10 crc kubenswrapper[4576]: E1203 09:13:10.986115 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5fe3155-d737-4a7e-9596-a6080cae4b27" containerName="ceilometer-central-agent" Dec 03 09:13:10 crc kubenswrapper[4576]: I1203 09:13:10.986121 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5fe3155-d737-4a7e-9596-a6080cae4b27" containerName="ceilometer-central-agent" Dec 03 09:13:10 crc kubenswrapper[4576]: E1203 09:13:10.986135 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5fe3155-d737-4a7e-9596-a6080cae4b27" containerName="ceilometer-notification-agent" Dec 03 09:13:10 crc kubenswrapper[4576]: I1203 09:13:10.986140 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5fe3155-d737-4a7e-9596-a6080cae4b27" containerName="ceilometer-notification-agent" Dec 03 09:13:10 crc kubenswrapper[4576]: I1203 09:13:10.986306 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5fe3155-d737-4a7e-9596-a6080cae4b27" containerName="ceilometer-central-agent" Dec 03 09:13:10 crc kubenswrapper[4576]: I1203 09:13:10.986328 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5fe3155-d737-4a7e-9596-a6080cae4b27" containerName="ceilometer-notification-agent" Dec 03 09:13:10 crc kubenswrapper[4576]: I1203 09:13:10.986341 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5fe3155-d737-4a7e-9596-a6080cae4b27" containerName="sg-core" Dec 03 09:13:10 crc kubenswrapper[4576]: I1203 09:13:10.986361 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5fe3155-d737-4a7e-9596-a6080cae4b27" containerName="proxy-httpd" Dec 03 09:13:10 crc kubenswrapper[4576]: I1203 09:13:10.998232 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 09:13:11 crc kubenswrapper[4576]: I1203 09:13:11.015897 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 09:13:11 crc kubenswrapper[4576]: I1203 09:13:11.016113 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 09:13:11 crc kubenswrapper[4576]: I1203 09:13:11.042075 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:13:11 crc kubenswrapper[4576]: I1203 09:13:11.128671 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22680aad-860c-4aba-8796-ff097168686e-log-httpd\") pod \"ceilometer-0\" (UID: \"22680aad-860c-4aba-8796-ff097168686e\") " pod="openstack/ceilometer-0" Dec 03 09:13:11 crc kubenswrapper[4576]: I1203 09:13:11.128734 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-67sfp\" (UniqueName: \"kubernetes.io/projected/22680aad-860c-4aba-8796-ff097168686e-kube-api-access-67sfp\") pod \"ceilometer-0\" (UID: \"22680aad-860c-4aba-8796-ff097168686e\") " pod="openstack/ceilometer-0" Dec 03 09:13:11 crc kubenswrapper[4576]: I1203 09:13:11.128787 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22680aad-860c-4aba-8796-ff097168686e-scripts\") pod \"ceilometer-0\" (UID: \"22680aad-860c-4aba-8796-ff097168686e\") " pod="openstack/ceilometer-0" Dec 03 09:13:11 crc kubenswrapper[4576]: I1203 09:13:11.128808 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22680aad-860c-4aba-8796-ff097168686e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"22680aad-860c-4aba-8796-ff097168686e\") " pod="openstack/ceilometer-0" Dec 03 09:13:11 crc kubenswrapper[4576]: I1203 09:13:11.128916 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22680aad-860c-4aba-8796-ff097168686e-config-data\") pod \"ceilometer-0\" (UID: \"22680aad-860c-4aba-8796-ff097168686e\") " pod="openstack/ceilometer-0" Dec 03 09:13:11 crc kubenswrapper[4576]: I1203 09:13:11.128953 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22680aad-860c-4aba-8796-ff097168686e-run-httpd\") pod \"ceilometer-0\" (UID: \"22680aad-860c-4aba-8796-ff097168686e\") " pod="openstack/ceilometer-0" Dec 03 09:13:11 crc kubenswrapper[4576]: I1203 09:13:11.128985 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/22680aad-860c-4aba-8796-ff097168686e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"22680aad-860c-4aba-8796-ff097168686e\") " pod="openstack/ceilometer-0" Dec 03 09:13:11 crc kubenswrapper[4576]: I1203 09:13:11.231577 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/22680aad-860c-4aba-8796-ff097168686e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"22680aad-860c-4aba-8796-ff097168686e\") " pod="openstack/ceilometer-0" Dec 03 09:13:11 crc kubenswrapper[4576]: I1203 
09:13:11.231670 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22680aad-860c-4aba-8796-ff097168686e-log-httpd\") pod \"ceilometer-0\" (UID: \"22680aad-860c-4aba-8796-ff097168686e\") " pod="openstack/ceilometer-0" Dec 03 09:13:11 crc kubenswrapper[4576]: I1203 09:13:11.231694 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-67sfp\" (UniqueName: \"kubernetes.io/projected/22680aad-860c-4aba-8796-ff097168686e-kube-api-access-67sfp\") pod \"ceilometer-0\" (UID: \"22680aad-860c-4aba-8796-ff097168686e\") " pod="openstack/ceilometer-0" Dec 03 09:13:11 crc kubenswrapper[4576]: I1203 09:13:11.231732 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22680aad-860c-4aba-8796-ff097168686e-scripts\") pod \"ceilometer-0\" (UID: \"22680aad-860c-4aba-8796-ff097168686e\") " pod="openstack/ceilometer-0" Dec 03 09:13:11 crc kubenswrapper[4576]: I1203 09:13:11.231749 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22680aad-860c-4aba-8796-ff097168686e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"22680aad-860c-4aba-8796-ff097168686e\") " pod="openstack/ceilometer-0" Dec 03 09:13:11 crc kubenswrapper[4576]: I1203 09:13:11.231823 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22680aad-860c-4aba-8796-ff097168686e-config-data\") pod \"ceilometer-0\" (UID: \"22680aad-860c-4aba-8796-ff097168686e\") " pod="openstack/ceilometer-0" Dec 03 09:13:11 crc kubenswrapper[4576]: I1203 09:13:11.231852 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22680aad-860c-4aba-8796-ff097168686e-run-httpd\") pod \"ceilometer-0\" (UID: \"22680aad-860c-4aba-8796-ff097168686e\") " pod="openstack/ceilometer-0" Dec 03 09:13:11 crc kubenswrapper[4576]: I1203 09:13:11.238941 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22680aad-860c-4aba-8796-ff097168686e-run-httpd\") pod \"ceilometer-0\" (UID: \"22680aad-860c-4aba-8796-ff097168686e\") " pod="openstack/ceilometer-0" Dec 03 09:13:11 crc kubenswrapper[4576]: I1203 09:13:11.239392 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/22680aad-860c-4aba-8796-ff097168686e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"22680aad-860c-4aba-8796-ff097168686e\") " pod="openstack/ceilometer-0" Dec 03 09:13:11 crc kubenswrapper[4576]: I1203 09:13:11.239567 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22680aad-860c-4aba-8796-ff097168686e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"22680aad-860c-4aba-8796-ff097168686e\") " pod="openstack/ceilometer-0" Dec 03 09:13:11 crc kubenswrapper[4576]: I1203 09:13:11.239805 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22680aad-860c-4aba-8796-ff097168686e-log-httpd\") pod \"ceilometer-0\" (UID: \"22680aad-860c-4aba-8796-ff097168686e\") " pod="openstack/ceilometer-0" Dec 03 09:13:11 crc kubenswrapper[4576]: I1203 09:13:11.245911 4576 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22680aad-860c-4aba-8796-ff097168686e-config-data\") pod \"ceilometer-0\" (UID: \"22680aad-860c-4aba-8796-ff097168686e\") " pod="openstack/ceilometer-0" Dec 03 09:13:11 crc kubenswrapper[4576]: I1203 09:13:11.263695 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22680aad-860c-4aba-8796-ff097168686e-scripts\") pod \"ceilometer-0\" (UID: \"22680aad-860c-4aba-8796-ff097168686e\") " pod="openstack/ceilometer-0" Dec 03 09:13:11 crc kubenswrapper[4576]: I1203 09:13:11.267675 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-67sfp\" (UniqueName: \"kubernetes.io/projected/22680aad-860c-4aba-8796-ff097168686e-kube-api-access-67sfp\") pod \"ceilometer-0\" (UID: \"22680aad-860c-4aba-8796-ff097168686e\") " pod="openstack/ceilometer-0" Dec 03 09:13:11 crc kubenswrapper[4576]: I1203 09:13:11.328795 4576 scope.go:117] "RemoveContainer" containerID="32b57303e9fb81ccf028911c977c5217cead677e6f4bbdef90a0052653a5853f" Dec 03 09:13:11 crc kubenswrapper[4576]: I1203 09:13:11.389881 4576 scope.go:117] "RemoveContainer" containerID="e664fb6cd75f081387fcb211ce9d76ee8303303cfcff773bb2e57ddfe65e3642" Dec 03 09:13:11 crc kubenswrapper[4576]: I1203 09:13:11.557008 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 09:13:11 crc kubenswrapper[4576]: I1203 09:13:11.701075 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b5fe3155-d737-4a7e-9596-a6080cae4b27" path="/var/lib/kubelet/pods/b5fe3155-d737-4a7e-9596-a6080cae4b27/volumes" Dec 03 09:13:11 crc kubenswrapper[4576]: I1203 09:13:11.786751 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-9f4d5dcf8-5lvz8" event={"ID":"14ee4879-18ef-4d7f-956f-03297ff160fe","Type":"ContainerStarted","Data":"665d2b9a6ca8f47a241e38c26d641b1b1781071d1f0bdc98504ee07492c1f8c8"} Dec 03 09:13:11 crc kubenswrapper[4576]: I1203 09:13:11.786814 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-9f4d5dcf8-5lvz8" event={"ID":"14ee4879-18ef-4d7f-956f-03297ff160fe","Type":"ContainerStarted","Data":"17de0844f6e251fd0d985e0aed96cddb4abb63bf54f0be19ea957baf714cefc0"} Dec 03 09:13:11 crc kubenswrapper[4576]: I1203 09:13:11.814273 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-9f4d5dcf8-5lvz8" podStartSLOduration=4.360136052 podStartE2EDuration="11.814254742s" podCreationTimestamp="2025-12-03 09:13:00 +0000 UTC" firstStartedPulling="2025-12-03 09:13:02.399592195 +0000 UTC m=+1989.785569179" lastFinishedPulling="2025-12-03 09:13:09.853710885 +0000 UTC m=+1997.239687869" observedRunningTime="2025-12-03 09:13:11.810793337 +0000 UTC m=+1999.196770331" watchObservedRunningTime="2025-12-03 09:13:11.814254742 +0000 UTC m=+1999.200231726" Dec 03 09:13:11 crc kubenswrapper[4576]: I1203 09:13:11.823833 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"23230cc9-ccdd-4985-a6e0-6b9542da25bc","Type":"ContainerStarted","Data":"9bed9b661351fcb0057b958916fa4d6aaadf7fc0e31079310fd45d3d1c0b6ffa"} Dec 03 09:13:11 crc kubenswrapper[4576]: I1203 09:13:11.824044 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="23230cc9-ccdd-4985-a6e0-6b9542da25bc" 
containerName="cinder-api-log" containerID="cri-o://96bd187c3393bb473bb0fa298313527f161e11b189b563b98adce3367c233ebf" gracePeriod=30 Dec 03 09:13:11 crc kubenswrapper[4576]: I1203 09:13:11.824330 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 03 09:13:11 crc kubenswrapper[4576]: I1203 09:13:11.824668 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="23230cc9-ccdd-4985-a6e0-6b9542da25bc" containerName="cinder-api" containerID="cri-o://9bed9b661351fcb0057b958916fa4d6aaadf7fc0e31079310fd45d3d1c0b6ffa" gracePeriod=30 Dec 03 09:13:11 crc kubenswrapper[4576]: I1203 09:13:11.893113 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=9.893093343 podStartE2EDuration="9.893093343s" podCreationTimestamp="2025-12-03 09:13:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:13:11.878893086 +0000 UTC m=+1999.264870070" watchObservedRunningTime="2025-12-03 09:13:11.893093343 +0000 UTC m=+1999.279070327" Dec 03 09:13:12 crc kubenswrapper[4576]: I1203 09:13:12.547584 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:13:12 crc kubenswrapper[4576]: I1203 09:13:12.876685 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9e725c45-22cc-4be1-8cdc-554f9af8a653","Type":"ContainerStarted","Data":"6a236c29784a8be34119dc8020c880b30270a6fa8ee8f642103657e81af80278"} Dec 03 09:13:12 crc kubenswrapper[4576]: I1203 09:13:12.881161 4576 generic.go:334] "Generic (PLEG): container finished" podID="23230cc9-ccdd-4985-a6e0-6b9542da25bc" containerID="96bd187c3393bb473bb0fa298313527f161e11b189b563b98adce3367c233ebf" exitCode=143 Dec 03 09:13:12 crc kubenswrapper[4576]: I1203 09:13:12.881223 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"23230cc9-ccdd-4985-a6e0-6b9542da25bc","Type":"ContainerDied","Data":"96bd187c3393bb473bb0fa298313527f161e11b189b563b98adce3367c233ebf"} Dec 03 09:13:12 crc kubenswrapper[4576]: I1203 09:13:12.883100 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22680aad-860c-4aba-8796-ff097168686e","Type":"ContainerStarted","Data":"e8a131d9ecaec9e78ab30042925de4c2e360926b9a0b7e5db54a79fdbe43903e"} Dec 03 09:13:12 crc kubenswrapper[4576]: I1203 09:13:12.889132 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7678fb6bf-m8m4k" event={"ID":"e8e42590-8fdb-4c05-a5e1-e2cddbeb0731","Type":"ContainerStarted","Data":"d5f41fb0b6c3a8c8afc37a758d7452917fcfb912e231f4ab212a76f409c09492"} Dec 03 09:13:12 crc kubenswrapper[4576]: I1203 09:13:12.892786 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-679c878b58-l5t4t" event={"ID":"c3348234-cbbe-464e-b7dd-493151ce96ef","Type":"ContainerStarted","Data":"95c2da6d16da50c79bfca3d4de62348de764f8dd9cd163ddc3672143e66a4c2c"} Dec 03 09:13:12 crc kubenswrapper[4576]: I1203 09:13:12.892815 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-679c878b58-l5t4t" Dec 03 09:13:12 crc kubenswrapper[4576]: I1203 09:13:12.892826 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-679c878b58-l5t4t" 
event={"ID":"c3348234-cbbe-464e-b7dd-493151ce96ef","Type":"ContainerStarted","Data":"34b618e0290cbbf606c8a60d857dc33dc3999d4723866927a583118ad298e89b"} Dec 03 09:13:12 crc kubenswrapper[4576]: I1203 09:13:12.892848 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-679c878b58-l5t4t" Dec 03 09:13:12 crc kubenswrapper[4576]: I1203 09:13:12.907876 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=10.929681431 podStartE2EDuration="11.907860368s" podCreationTimestamp="2025-12-03 09:13:01 +0000 UTC" firstStartedPulling="2025-12-03 09:13:04.069045418 +0000 UTC m=+1991.455022402" lastFinishedPulling="2025-12-03 09:13:05.047224365 +0000 UTC m=+1992.433201339" observedRunningTime="2025-12-03 09:13:12.903340135 +0000 UTC m=+2000.289317119" watchObservedRunningTime="2025-12-03 09:13:12.907860368 +0000 UTC m=+2000.293837352" Dec 03 09:13:12 crc kubenswrapper[4576]: I1203 09:13:12.959758 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-679c878b58-l5t4t" podStartSLOduration=4.959729114 podStartE2EDuration="4.959729114s" podCreationTimestamp="2025-12-03 09:13:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:13:12.952758324 +0000 UTC m=+2000.338735308" watchObservedRunningTime="2025-12-03 09:13:12.959729114 +0000 UTC m=+2000.345706108" Dec 03 09:13:12 crc kubenswrapper[4576]: I1203 09:13:12.980022 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-7678fb6bf-m8m4k" podStartSLOduration=4.562813744 podStartE2EDuration="12.980001907s" podCreationTimestamp="2025-12-03 09:13:00 +0000 UTC" firstStartedPulling="2025-12-03 09:13:01.648956399 +0000 UTC m=+1989.034933393" lastFinishedPulling="2025-12-03 09:13:10.066144572 +0000 UTC m=+1997.452121556" observedRunningTime="2025-12-03 09:13:12.978628429 +0000 UTC m=+2000.364605413" watchObservedRunningTime="2025-12-03 09:13:12.980001907 +0000 UTC m=+2000.365978891" Dec 03 09:13:13 crc kubenswrapper[4576]: I1203 09:13:13.407670 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6578955fd5-9v8tt" Dec 03 09:13:13 crc kubenswrapper[4576]: I1203 09:13:13.547224 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-dl696"] Dec 03 09:13:13 crc kubenswrapper[4576]: I1203 09:13:13.549794 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-56df8fb6b7-dl696" podUID="3ca98b4d-d895-4456-81ae-ea8c0d5f59d2" containerName="dnsmasq-dns" containerID="cri-o://3700fddfb4a90f02b766ca62a7992ef81fcf3023ee2a1a3f6b6e632eb66beab6" gracePeriod=10 Dec 03 09:13:13 crc kubenswrapper[4576]: I1203 09:13:13.934692 4576 generic.go:334] "Generic (PLEG): container finished" podID="3ca98b4d-d895-4456-81ae-ea8c0d5f59d2" containerID="3700fddfb4a90f02b766ca62a7992ef81fcf3023ee2a1a3f6b6e632eb66beab6" exitCode=0 Dec 03 09:13:13 crc kubenswrapper[4576]: I1203 09:13:13.936005 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-dl696" event={"ID":"3ca98b4d-d895-4456-81ae-ea8c0d5f59d2","Type":"ContainerDied","Data":"3700fddfb4a90f02b766ca62a7992ef81fcf3023ee2a1a3f6b6e632eb66beab6"} Dec 03 09:13:14 crc kubenswrapper[4576]: I1203 09:13:14.459656 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-56df8fb6b7-dl696" Dec 03 09:13:14 crc kubenswrapper[4576]: I1203 09:13:14.621317 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3ca98b4d-d895-4456-81ae-ea8c0d5f59d2-ovsdbserver-nb\") pod \"3ca98b4d-d895-4456-81ae-ea8c0d5f59d2\" (UID: \"3ca98b4d-d895-4456-81ae-ea8c0d5f59d2\") " Dec 03 09:13:14 crc kubenswrapper[4576]: I1203 09:13:14.621395 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3ca98b4d-d895-4456-81ae-ea8c0d5f59d2-dns-swift-storage-0\") pod \"3ca98b4d-d895-4456-81ae-ea8c0d5f59d2\" (UID: \"3ca98b4d-d895-4456-81ae-ea8c0d5f59d2\") " Dec 03 09:13:14 crc kubenswrapper[4576]: I1203 09:13:14.621465 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3ca98b4d-d895-4456-81ae-ea8c0d5f59d2-config\") pod \"3ca98b4d-d895-4456-81ae-ea8c0d5f59d2\" (UID: \"3ca98b4d-d895-4456-81ae-ea8c0d5f59d2\") " Dec 03 09:13:14 crc kubenswrapper[4576]: I1203 09:13:14.621491 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3ca98b4d-d895-4456-81ae-ea8c0d5f59d2-dns-svc\") pod \"3ca98b4d-d895-4456-81ae-ea8c0d5f59d2\" (UID: \"3ca98b4d-d895-4456-81ae-ea8c0d5f59d2\") " Dec 03 09:13:14 crc kubenswrapper[4576]: I1203 09:13:14.621554 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3ca98b4d-d895-4456-81ae-ea8c0d5f59d2-ovsdbserver-sb\") pod \"3ca98b4d-d895-4456-81ae-ea8c0d5f59d2\" (UID: \"3ca98b4d-d895-4456-81ae-ea8c0d5f59d2\") " Dec 03 09:13:14 crc kubenswrapper[4576]: I1203 09:13:14.621619 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sthtq\" (UniqueName: \"kubernetes.io/projected/3ca98b4d-d895-4456-81ae-ea8c0d5f59d2-kube-api-access-sthtq\") pod \"3ca98b4d-d895-4456-81ae-ea8c0d5f59d2\" (UID: \"3ca98b4d-d895-4456-81ae-ea8c0d5f59d2\") " Dec 03 09:13:14 crc kubenswrapper[4576]: I1203 09:13:14.634783 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ca98b4d-d895-4456-81ae-ea8c0d5f59d2-kube-api-access-sthtq" (OuterVolumeSpecName: "kube-api-access-sthtq") pod "3ca98b4d-d895-4456-81ae-ea8c0d5f59d2" (UID: "3ca98b4d-d895-4456-81ae-ea8c0d5f59d2"). InnerVolumeSpecName "kube-api-access-sthtq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:13:14 crc kubenswrapper[4576]: I1203 09:13:14.703492 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-75c4c4f78-4f2pp" podUID="114a8b03-2981-44ce-84e0-6ee5d96ab0b6" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.160:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 09:13:14 crc kubenswrapper[4576]: I1203 09:13:14.723996 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sthtq\" (UniqueName: \"kubernetes.io/projected/3ca98b4d-d895-4456-81ae-ea8c0d5f59d2-kube-api-access-sthtq\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:14 crc kubenswrapper[4576]: I1203 09:13:14.825211 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3ca98b4d-d895-4456-81ae-ea8c0d5f59d2-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "3ca98b4d-d895-4456-81ae-ea8c0d5f59d2" (UID: "3ca98b4d-d895-4456-81ae-ea8c0d5f59d2"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:13:14 crc kubenswrapper[4576]: I1203 09:13:14.834381 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3ca98b4d-d895-4456-81ae-ea8c0d5f59d2-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "3ca98b4d-d895-4456-81ae-ea8c0d5f59d2" (UID: "3ca98b4d-d895-4456-81ae-ea8c0d5f59d2"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:13:14 crc kubenswrapper[4576]: I1203 09:13:14.853246 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3ca98b4d-d895-4456-81ae-ea8c0d5f59d2-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3ca98b4d-d895-4456-81ae-ea8c0d5f59d2" (UID: "3ca98b4d-d895-4456-81ae-ea8c0d5f59d2"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:13:14 crc kubenswrapper[4576]: I1203 09:13:14.859296 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3ca98b4d-d895-4456-81ae-ea8c0d5f59d2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3ca98b4d-d895-4456-81ae-ea8c0d5f59d2" (UID: "3ca98b4d-d895-4456-81ae-ea8c0d5f59d2"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:13:14 crc kubenswrapper[4576]: I1203 09:13:14.891564 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3ca98b4d-d895-4456-81ae-ea8c0d5f59d2-config" (OuterVolumeSpecName: "config") pod "3ca98b4d-d895-4456-81ae-ea8c0d5f59d2" (UID: "3ca98b4d-d895-4456-81ae-ea8c0d5f59d2"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:13:14 crc kubenswrapper[4576]: I1203 09:13:14.926659 4576 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3ca98b4d-d895-4456-81ae-ea8c0d5f59d2-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:14 crc kubenswrapper[4576]: I1203 09:13:14.926693 4576 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3ca98b4d-d895-4456-81ae-ea8c0d5f59d2-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:14 crc kubenswrapper[4576]: I1203 09:13:14.926705 4576 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3ca98b4d-d895-4456-81ae-ea8c0d5f59d2-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:14 crc kubenswrapper[4576]: I1203 09:13:14.926717 4576 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3ca98b4d-d895-4456-81ae-ea8c0d5f59d2-config\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:14 crc kubenswrapper[4576]: I1203 09:13:14.926725 4576 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3ca98b4d-d895-4456-81ae-ea8c0d5f59d2-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:14 crc kubenswrapper[4576]: I1203 09:13:14.977937 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22680aad-860c-4aba-8796-ff097168686e","Type":"ContainerStarted","Data":"911097f0a9e8cada88ce210c8b6e3adebca482981d855dfbb11e27d113f3b59e"} Dec 03 09:13:15 crc kubenswrapper[4576]: I1203 09:13:15.004310 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-56df8fb6b7-dl696" Dec 03 09:13:15 crc kubenswrapper[4576]: I1203 09:13:15.005651 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-dl696" event={"ID":"3ca98b4d-d895-4456-81ae-ea8c0d5f59d2","Type":"ContainerDied","Data":"ae26bdc5822d38bbd80e3c4408db0cebe0ca005518dd0e6970f1bfd22945ca15"} Dec 03 09:13:15 crc kubenswrapper[4576]: I1203 09:13:15.005713 4576 scope.go:117] "RemoveContainer" containerID="3700fddfb4a90f02b766ca62a7992ef81fcf3023ee2a1a3f6b6e632eb66beab6" Dec 03 09:13:15 crc kubenswrapper[4576]: I1203 09:13:15.038201 4576 scope.go:117] "RemoveContainer" containerID="91903483448a43d2d3bbebe8ba153627d8cf9b45b44c888db9f454047ae87870" Dec 03 09:13:15 crc kubenswrapper[4576]: I1203 09:13:15.072873 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-dl696"] Dec 03 09:13:15 crc kubenswrapper[4576]: I1203 09:13:15.086979 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-dl696"] Dec 03 09:13:15 crc kubenswrapper[4576]: I1203 09:13:15.418731 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-75c4c4f78-4f2pp" podUID="114a8b03-2981-44ce-84e0-6ee5d96ab0b6" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.160:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 09:13:15 crc kubenswrapper[4576]: I1203 09:13:15.709934 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ca98b4d-d895-4456-81ae-ea8c0d5f59d2" path="/var/lib/kubelet/pods/3ca98b4d-d895-4456-81ae-ea8c0d5f59d2/volumes" Dec 03 09:13:16 crc kubenswrapper[4576]: I1203 09:13:16.015208 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22680aad-860c-4aba-8796-ff097168686e","Type":"ContainerStarted","Data":"80c57d3c0ad3fbb0e6afa522538c05ba7514a3151fa069b74598f692e556f6fb"} Dec 03 09:13:16 crc kubenswrapper[4576]: I1203 09:13:16.015251 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22680aad-860c-4aba-8796-ff097168686e","Type":"ContainerStarted","Data":"b4b98a8d6c628e682631dfcc383328641fe0d3cc133f9eeb49129680a5ce7511"} Dec 03 09:13:16 crc kubenswrapper[4576]: I1203 09:13:16.410805 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-75c4c4f78-4f2pp" podUID="114a8b03-2981-44ce-84e0-6ee5d96ab0b6" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.160:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 09:13:17 crc kubenswrapper[4576]: I1203 09:13:17.687269 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 03 09:13:17 crc kubenswrapper[4576]: I1203 09:13:17.687367 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/cinder-scheduler-0" podUID="9e725c45-22cc-4be1-8cdc-554f9af8a653" containerName="cinder-scheduler" probeResult="failure" output="Get \"http://10.217.0.161:8080/\": dial tcp 10.217.0.161:8080: connect: connection refused" Dec 03 09:13:18 crc kubenswrapper[4576]: I1203 09:13:18.106913 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22680aad-860c-4aba-8796-ff097168686e","Type":"ContainerStarted","Data":"84fad1d87e8198d81331d0ccad98dbeb679c5449a353d8c8fe08dbe809622c04"} Dec 03 09:13:18 crc 
kubenswrapper[4576]: I1203 09:13:18.108461 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 09:13:18 crc kubenswrapper[4576]: I1203 09:13:18.152771 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.445652205 podStartE2EDuration="8.152754183s" podCreationTimestamp="2025-12-03 09:13:10 +0000 UTC" firstStartedPulling="2025-12-03 09:13:12.560618461 +0000 UTC m=+1999.946595445" lastFinishedPulling="2025-12-03 09:13:17.267720439 +0000 UTC m=+2004.653697423" observedRunningTime="2025-12-03 09:13:18.15153825 +0000 UTC m=+2005.537515234" watchObservedRunningTime="2025-12-03 09:13:18.152754183 +0000 UTC m=+2005.538731177" Dec 03 09:13:18 crc kubenswrapper[4576]: I1203 09:13:18.370779 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-75c4c4f78-4f2pp" podUID="114a8b03-2981-44ce-84e0-6ee5d96ab0b6" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.160:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 09:13:19 crc kubenswrapper[4576]: I1203 09:13:19.744730 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-75c4c4f78-4f2pp" podUID="114a8b03-2981-44ce-84e0-6ee5d96ab0b6" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.160:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 09:13:19 crc kubenswrapper[4576]: I1203 09:13:19.753897 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-75c4c4f78-4f2pp" Dec 03 09:13:20 crc kubenswrapper[4576]: I1203 09:13:20.461829 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-75c4c4f78-4f2pp" podUID="114a8b03-2981-44ce-84e0-6ee5d96ab0b6" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.160:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 09:13:20 crc kubenswrapper[4576]: I1203 09:13:20.530296 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-5f666f544-xj7sz" Dec 03 09:13:20 crc kubenswrapper[4576]: I1203 09:13:20.887406 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-5f666f544-xj7sz" Dec 03 09:13:21 crc kubenswrapper[4576]: I1203 09:13:21.451729 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-75c4c4f78-4f2pp" podUID="114a8b03-2981-44ce-84e0-6ee5d96ab0b6" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.160:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 09:13:21 crc kubenswrapper[4576]: I1203 09:13:21.471404 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-75c4c4f78-4f2pp" Dec 03 09:13:21 crc kubenswrapper[4576]: I1203 09:13:21.471463 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-69657bfb7-ncr7l" Dec 03 09:13:22 crc kubenswrapper[4576]: I1203 09:13:22.278852 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/neutron-667d896bdd-mtmgs" podUID="2e5e255c-5481-4fc1-937f-53014b9b1da3" containerName="neutron-api" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 03 09:13:22 crc 
kubenswrapper[4576]: I1203 09:13:22.283219 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/neutron-667d896bdd-mtmgs" podUID="2e5e255c-5481-4fc1-937f-53014b9b1da3" containerName="neutron-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 03 09:13:22 crc kubenswrapper[4576]: I1203 09:13:22.283750 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/neutron-667d896bdd-mtmgs" podUID="2e5e255c-5481-4fc1-937f-53014b9b1da3" containerName="neutron-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 03 09:13:23 crc kubenswrapper[4576]: I1203 09:13:23.189162 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-679c878b58-l5t4t" Dec 03 09:13:23 crc kubenswrapper[4576]: I1203 09:13:23.306277 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 03 09:13:23 crc kubenswrapper[4576]: I1203 09:13:23.356984 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-679c878b58-l5t4t" podUID="c3348234-cbbe-464e-b7dd-493151ce96ef" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.164:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 09:13:23 crc kubenswrapper[4576]: I1203 09:13:23.384653 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 09:13:23 crc kubenswrapper[4576]: I1203 09:13:23.652970 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-679c878b58-l5t4t" Dec 03 09:13:23 crc kubenswrapper[4576]: I1203 09:13:23.749995 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-75c4c4f78-4f2pp"] Dec 03 09:13:23 crc kubenswrapper[4576]: I1203 09:13:23.750215 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-75c4c4f78-4f2pp" podUID="114a8b03-2981-44ce-84e0-6ee5d96ab0b6" containerName="barbican-api-log" containerID="cri-o://b026351891cac9c52ec724c71a84d718a7ad227787998ad0f35a6410a97dc695" gracePeriod=30 Dec 03 09:13:23 crc kubenswrapper[4576]: I1203 09:13:23.750641 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-75c4c4f78-4f2pp" podUID="114a8b03-2981-44ce-84e0-6ee5d96ab0b6" containerName="barbican-api" containerID="cri-o://6ef82bfb2166922493bd35625c7139aad0ca292c473c4fbe824f592069923330" gracePeriod=30 Dec 03 09:13:23 crc kubenswrapper[4576]: I1203 09:13:23.801246 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="23230cc9-ccdd-4985-a6e0-6b9542da25bc" containerName="cinder-api" probeResult="failure" output="Get \"http://10.217.0.163:8776/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 09:13:24 crc kubenswrapper[4576]: I1203 09:13:24.195976 4576 generic.go:334] "Generic (PLEG): container finished" podID="114a8b03-2981-44ce-84e0-6ee5d96ab0b6" containerID="b026351891cac9c52ec724c71a84d718a7ad227787998ad0f35a6410a97dc695" exitCode=143 Dec 03 09:13:24 crc kubenswrapper[4576]: I1203 09:13:24.196505 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="9e725c45-22cc-4be1-8cdc-554f9af8a653" containerName="cinder-scheduler" containerID="cri-o://0832e015f2eadb9ceae7e56734f7e1c8011a7b01fed9eac59a662f40962a14cb" gracePeriod=30 
Dec 03 09:13:24 crc kubenswrapper[4576]: I1203 09:13:24.196619 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="9e725c45-22cc-4be1-8cdc-554f9af8a653" containerName="probe" containerID="cri-o://6a236c29784a8be34119dc8020c880b30270a6fa8ee8f642103657e81af80278" gracePeriod=30 Dec 03 09:13:24 crc kubenswrapper[4576]: I1203 09:13:24.196668 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-75c4c4f78-4f2pp" event={"ID":"114a8b03-2981-44ce-84e0-6ee5d96ab0b6","Type":"ContainerDied","Data":"b026351891cac9c52ec724c71a84d718a7ad227787998ad0f35a6410a97dc695"} Dec 03 09:13:24 crc kubenswrapper[4576]: I1203 09:13:24.530261 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 03 09:13:24 crc kubenswrapper[4576]: E1203 09:13:24.530943 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ca98b4d-d895-4456-81ae-ea8c0d5f59d2" containerName="init" Dec 03 09:13:24 crc kubenswrapper[4576]: I1203 09:13:24.530963 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ca98b4d-d895-4456-81ae-ea8c0d5f59d2" containerName="init" Dec 03 09:13:24 crc kubenswrapper[4576]: E1203 09:13:24.530996 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ca98b4d-d895-4456-81ae-ea8c0d5f59d2" containerName="dnsmasq-dns" Dec 03 09:13:24 crc kubenswrapper[4576]: I1203 09:13:24.531004 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ca98b4d-d895-4456-81ae-ea8c0d5f59d2" containerName="dnsmasq-dns" Dec 03 09:13:24 crc kubenswrapper[4576]: I1203 09:13:24.531246 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ca98b4d-d895-4456-81ae-ea8c0d5f59d2" containerName="dnsmasq-dns" Dec 03 09:13:24 crc kubenswrapper[4576]: I1203 09:13:24.531883 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 03 09:13:24 crc kubenswrapper[4576]: I1203 09:13:24.539733 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 03 09:13:24 crc kubenswrapper[4576]: I1203 09:13:24.548419 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Dec 03 09:13:24 crc kubenswrapper[4576]: I1203 09:13:24.548645 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-gtlww" Dec 03 09:13:24 crc kubenswrapper[4576]: I1203 09:13:24.548794 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Dec 03 09:13:24 crc kubenswrapper[4576]: I1203 09:13:24.580152 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/neutron-6f978d8b99-7d88p" podUID="cdfca795-30e9-4534-9084-e34e01ab71ae" containerName="neutron-api" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 03 09:13:24 crc kubenswrapper[4576]: I1203 09:13:24.594891 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/neutron-6f978d8b99-7d88p" podUID="cdfca795-30e9-4534-9084-e34e01ab71ae" containerName="neutron-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 03 09:13:24 crc kubenswrapper[4576]: I1203 09:13:24.620949 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/neutron-6f978d8b99-7d88p" podUID="cdfca795-30e9-4534-9084-e34e01ab71ae" containerName="neutron-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 03 09:13:24 crc kubenswrapper[4576]: I1203 09:13:24.690509 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96941bb6-e76f-4589-a05e-73a373c310b6-combined-ca-bundle\") pod \"openstackclient\" (UID: \"96941bb6-e76f-4589-a05e-73a373c310b6\") " pod="openstack/openstackclient" Dec 03 09:13:24 crc kubenswrapper[4576]: I1203 09:13:24.776686 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vgzhh\" (UniqueName: \"kubernetes.io/projected/96941bb6-e76f-4589-a05e-73a373c310b6-kube-api-access-vgzhh\") pod \"openstackclient\" (UID: \"96941bb6-e76f-4589-a05e-73a373c310b6\") " pod="openstack/openstackclient" Dec 03 09:13:24 crc kubenswrapper[4576]: I1203 09:13:24.776869 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/96941bb6-e76f-4589-a05e-73a373c310b6-openstack-config-secret\") pod \"openstackclient\" (UID: \"96941bb6-e76f-4589-a05e-73a373c310b6\") " pod="openstack/openstackclient" Dec 03 09:13:24 crc kubenswrapper[4576]: I1203 09:13:24.776892 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/96941bb6-e76f-4589-a05e-73a373c310b6-openstack-config\") pod \"openstackclient\" (UID: \"96941bb6-e76f-4589-a05e-73a373c310b6\") " pod="openstack/openstackclient" Dec 03 09:13:24 crc kubenswrapper[4576]: I1203 09:13:24.878582 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Dec 03 09:13:24 crc kubenswrapper[4576]: E1203 09:13:24.879332 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle kube-api-access-vgzhh 
openstack-config openstack-config-secret], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/openstackclient" podUID="96941bb6-e76f-4589-a05e-73a373c310b6" Dec 03 09:13:24 crc kubenswrapper[4576]: I1203 09:13:24.882538 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/96941bb6-e76f-4589-a05e-73a373c310b6-openstack-config-secret\") pod \"openstackclient\" (UID: \"96941bb6-e76f-4589-a05e-73a373c310b6\") " pod="openstack/openstackclient" Dec 03 09:13:24 crc kubenswrapper[4576]: I1203 09:13:24.882572 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/96941bb6-e76f-4589-a05e-73a373c310b6-openstack-config\") pod \"openstackclient\" (UID: \"96941bb6-e76f-4589-a05e-73a373c310b6\") " pod="openstack/openstackclient" Dec 03 09:13:24 crc kubenswrapper[4576]: I1203 09:13:24.882677 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96941bb6-e76f-4589-a05e-73a373c310b6-combined-ca-bundle\") pod \"openstackclient\" (UID: \"96941bb6-e76f-4589-a05e-73a373c310b6\") " pod="openstack/openstackclient" Dec 03 09:13:24 crc kubenswrapper[4576]: I1203 09:13:24.882707 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vgzhh\" (UniqueName: \"kubernetes.io/projected/96941bb6-e76f-4589-a05e-73a373c310b6-kube-api-access-vgzhh\") pod \"openstackclient\" (UID: \"96941bb6-e76f-4589-a05e-73a373c310b6\") " pod="openstack/openstackclient" Dec 03 09:13:24 crc kubenswrapper[4576]: I1203 09:13:24.886268 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/96941bb6-e76f-4589-a05e-73a373c310b6-openstack-config\") pod \"openstackclient\" (UID: \"96941bb6-e76f-4589-a05e-73a373c310b6\") " pod="openstack/openstackclient" Dec 03 09:13:24 crc kubenswrapper[4576]: E1203 09:13:24.895169 4576 projected.go:194] Error preparing data for projected volume kube-api-access-vgzhh for pod openstack/openstackclient: failed to fetch token: serviceaccounts "openstackclient-openstackclient" is forbidden: User "system:node:crc" cannot create resource "serviceaccounts/token" in API group "" in the namespace "openstack": no relationship found between node 'crc' and this object Dec 03 09:13:24 crc kubenswrapper[4576]: E1203 09:13:24.895238 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/96941bb6-e76f-4589-a05e-73a373c310b6-kube-api-access-vgzhh podName:96941bb6-e76f-4589-a05e-73a373c310b6 nodeName:}" failed. No retries permitted until 2025-12-03 09:13:25.395220199 +0000 UTC m=+2012.781197183 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-vgzhh" (UniqueName: "kubernetes.io/projected/96941bb6-e76f-4589-a05e-73a373c310b6-kube-api-access-vgzhh") pod "openstackclient" (UID: "96941bb6-e76f-4589-a05e-73a373c310b6") : failed to fetch token: serviceaccounts "openstackclient-openstackclient" is forbidden: User "system:node:crc" cannot create resource "serviceaccounts/token" in API group "" in the namespace "openstack": no relationship found between node 'crc' and this object Dec 03 09:13:24 crc kubenswrapper[4576]: I1203 09:13:24.903509 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/96941bb6-e76f-4589-a05e-73a373c310b6-openstack-config-secret\") pod \"openstackclient\" (UID: \"96941bb6-e76f-4589-a05e-73a373c310b6\") " pod="openstack/openstackclient" Dec 03 09:13:24 crc kubenswrapper[4576]: I1203 09:13:24.910193 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96941bb6-e76f-4589-a05e-73a373c310b6-combined-ca-bundle\") pod \"openstackclient\" (UID: \"96941bb6-e76f-4589-a05e-73a373c310b6\") " pod="openstack/openstackclient" Dec 03 09:13:24 crc kubenswrapper[4576]: I1203 09:13:24.958624 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Dec 03 09:13:25 crc kubenswrapper[4576]: I1203 09:13:25.015635 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 03 09:13:25 crc kubenswrapper[4576]: I1203 09:13:25.017303 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 03 09:13:25 crc kubenswrapper[4576]: I1203 09:13:25.072397 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 03 09:13:25 crc kubenswrapper[4576]: I1203 09:13:25.090324 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/649a142a-4649-45fb-bdba-11fcc838bf97-combined-ca-bundle\") pod \"openstackclient\" (UID: \"649a142a-4649-45fb-bdba-11fcc838bf97\") " pod="openstack/openstackclient" Dec 03 09:13:25 crc kubenswrapper[4576]: I1203 09:13:25.090421 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/649a142a-4649-45fb-bdba-11fcc838bf97-openstack-config-secret\") pod \"openstackclient\" (UID: \"649a142a-4649-45fb-bdba-11fcc838bf97\") " pod="openstack/openstackclient" Dec 03 09:13:25 crc kubenswrapper[4576]: I1203 09:13:25.090536 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-69fbh\" (UniqueName: \"kubernetes.io/projected/649a142a-4649-45fb-bdba-11fcc838bf97-kube-api-access-69fbh\") pod \"openstackclient\" (UID: \"649a142a-4649-45fb-bdba-11fcc838bf97\") " pod="openstack/openstackclient" Dec 03 09:13:25 crc kubenswrapper[4576]: I1203 09:13:25.090597 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/649a142a-4649-45fb-bdba-11fcc838bf97-openstack-config\") pod \"openstackclient\" (UID: \"649a142a-4649-45fb-bdba-11fcc838bf97\") " pod="openstack/openstackclient" Dec 03 09:13:25 crc kubenswrapper[4576]: I1203 09:13:25.192382 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-69fbh\" (UniqueName: \"kubernetes.io/projected/649a142a-4649-45fb-bdba-11fcc838bf97-kube-api-access-69fbh\") pod \"openstackclient\" (UID: \"649a142a-4649-45fb-bdba-11fcc838bf97\") " pod="openstack/openstackclient" Dec 03 09:13:25 crc kubenswrapper[4576]: I1203 09:13:25.192454 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/649a142a-4649-45fb-bdba-11fcc838bf97-openstack-config\") pod \"openstackclient\" (UID: \"649a142a-4649-45fb-bdba-11fcc838bf97\") " pod="openstack/openstackclient" Dec 03 09:13:25 crc kubenswrapper[4576]: I1203 09:13:25.192478 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/649a142a-4649-45fb-bdba-11fcc838bf97-combined-ca-bundle\") pod \"openstackclient\" (UID: \"649a142a-4649-45fb-bdba-11fcc838bf97\") " pod="openstack/openstackclient" Dec 03 09:13:25 crc kubenswrapper[4576]: I1203 09:13:25.192544 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/649a142a-4649-45fb-bdba-11fcc838bf97-openstack-config-secret\") pod \"openstackclient\" (UID: \"649a142a-4649-45fb-bdba-11fcc838bf97\") " pod="openstack/openstackclient" Dec 03 09:13:25 crc kubenswrapper[4576]: I1203 09:13:25.193857 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/649a142a-4649-45fb-bdba-11fcc838bf97-openstack-config\") pod \"openstackclient\" (UID: \"649a142a-4649-45fb-bdba-11fcc838bf97\") " pod="openstack/openstackclient" Dec 03 09:13:25 crc kubenswrapper[4576]: I1203 09:13:25.201088 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/649a142a-4649-45fb-bdba-11fcc838bf97-openstack-config-secret\") pod \"openstackclient\" (UID: \"649a142a-4649-45fb-bdba-11fcc838bf97\") " pod="openstack/openstackclient" Dec 03 09:13:25 crc kubenswrapper[4576]: I1203 09:13:25.206427 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/649a142a-4649-45fb-bdba-11fcc838bf97-combined-ca-bundle\") pod \"openstackclient\" (UID: \"649a142a-4649-45fb-bdba-11fcc838bf97\") " pod="openstack/openstackclient" Dec 03 09:13:25 crc kubenswrapper[4576]: I1203 09:13:25.215066 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-69fbh\" (UniqueName: \"kubernetes.io/projected/649a142a-4649-45fb-bdba-11fcc838bf97-kube-api-access-69fbh\") pod \"openstackclient\" (UID: \"649a142a-4649-45fb-bdba-11fcc838bf97\") " pod="openstack/openstackclient" Dec 03 09:13:25 crc kubenswrapper[4576]: I1203 09:13:25.219863 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 03 09:13:25 crc kubenswrapper[4576]: I1203 09:13:25.232029 4576 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="96941bb6-e76f-4589-a05e-73a373c310b6" podUID="649a142a-4649-45fb-bdba-11fcc838bf97" Dec 03 09:13:25 crc kubenswrapper[4576]: I1203 09:13:25.284774 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 03 09:13:25 crc kubenswrapper[4576]: I1203 09:13:25.378124 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 03 09:13:25 crc kubenswrapper[4576]: I1203 09:13:25.395211 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/96941bb6-e76f-4589-a05e-73a373c310b6-openstack-config-secret\") pod \"96941bb6-e76f-4589-a05e-73a373c310b6\" (UID: \"96941bb6-e76f-4589-a05e-73a373c310b6\") " Dec 03 09:13:25 crc kubenswrapper[4576]: I1203 09:13:25.395306 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96941bb6-e76f-4589-a05e-73a373c310b6-combined-ca-bundle\") pod \"96941bb6-e76f-4589-a05e-73a373c310b6\" (UID: \"96941bb6-e76f-4589-a05e-73a373c310b6\") " Dec 03 09:13:25 crc kubenswrapper[4576]: I1203 09:13:25.395427 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/96941bb6-e76f-4589-a05e-73a373c310b6-openstack-config\") pod \"96941bb6-e76f-4589-a05e-73a373c310b6\" (UID: \"96941bb6-e76f-4589-a05e-73a373c310b6\") " Dec 03 09:13:25 crc kubenswrapper[4576]: I1203 09:13:25.395819 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vgzhh\" (UniqueName: \"kubernetes.io/projected/96941bb6-e76f-4589-a05e-73a373c310b6-kube-api-access-vgzhh\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:25 crc kubenswrapper[4576]: I1203 09:13:25.396307 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96941bb6-e76f-4589-a05e-73a373c310b6-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "96941bb6-e76f-4589-a05e-73a373c310b6" (UID: "96941bb6-e76f-4589-a05e-73a373c310b6"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:13:25 crc kubenswrapper[4576]: I1203 09:13:25.416925 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96941bb6-e76f-4589-a05e-73a373c310b6-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "96941bb6-e76f-4589-a05e-73a373c310b6" (UID: "96941bb6-e76f-4589-a05e-73a373c310b6"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:13:25 crc kubenswrapper[4576]: I1203 09:13:25.417039 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96941bb6-e76f-4589-a05e-73a373c310b6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "96941bb6-e76f-4589-a05e-73a373c310b6" (UID: "96941bb6-e76f-4589-a05e-73a373c310b6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:13:25 crc kubenswrapper[4576]: I1203 09:13:25.500703 4576 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96941bb6-e76f-4589-a05e-73a373c310b6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:25 crc kubenswrapper[4576]: I1203 09:13:25.500734 4576 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/96941bb6-e76f-4589-a05e-73a373c310b6-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:25 crc kubenswrapper[4576]: I1203 09:13:25.500744 4576 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/96941bb6-e76f-4589-a05e-73a373c310b6-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:25 crc kubenswrapper[4576]: I1203 09:13:25.693216 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96941bb6-e76f-4589-a05e-73a373c310b6" path="/var/lib/kubelet/pods/96941bb6-e76f-4589-a05e-73a373c310b6/volumes" Dec 03 09:13:26 crc kubenswrapper[4576]: I1203 09:13:26.231046 4576 generic.go:334] "Generic (PLEG): container finished" podID="9e725c45-22cc-4be1-8cdc-554f9af8a653" containerID="6a236c29784a8be34119dc8020c880b30270a6fa8ee8f642103657e81af80278" exitCode=0 Dec 03 09:13:26 crc kubenswrapper[4576]: I1203 09:13:26.231094 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9e725c45-22cc-4be1-8cdc-554f9af8a653","Type":"ContainerDied","Data":"6a236c29784a8be34119dc8020c880b30270a6fa8ee8f642103657e81af80278"} Dec 03 09:13:26 crc kubenswrapper[4576]: I1203 09:13:26.231700 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 03 09:13:26 crc kubenswrapper[4576]: I1203 09:13:26.239298 4576 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="96941bb6-e76f-4589-a05e-73a373c310b6" podUID="649a142a-4649-45fb-bdba-11fcc838bf97" Dec 03 09:13:26 crc kubenswrapper[4576]: I1203 09:13:26.350233 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 03 09:13:26 crc kubenswrapper[4576]: I1203 09:13:26.648784 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-679c878b58-l5t4t" podUID="c3348234-cbbe-464e-b7dd-493151ce96ef" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.164:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 09:13:27 crc kubenswrapper[4576]: I1203 09:13:27.251658 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"649a142a-4649-45fb-bdba-11fcc838bf97","Type":"ContainerStarted","Data":"3c646e99fdee03154abdab080ccdda0d989c2f987c51762bd80aaf95b31cc660"} Dec 03 09:13:27 crc kubenswrapper[4576]: I1203 09:13:27.407662 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-75c4c4f78-4f2pp" podUID="114a8b03-2981-44ce-84e0-6ee5d96ab0b6" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.160:9311/healthcheck\": read tcp 10.217.0.2:33572->10.217.0.160:9311: read: connection reset by peer" Dec 03 09:13:27 crc kubenswrapper[4576]: I1203 09:13:27.407413 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-75c4c4f78-4f2pp" podUID="114a8b03-2981-44ce-84e0-6ee5d96ab0b6" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.160:9311/healthcheck\": read tcp 10.217.0.2:33570->10.217.0.160:9311: read: connection reset by peer" Dec 03 09:13:27 crc kubenswrapper[4576]: E1203 09:13:27.560640 4576 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod114a8b03_2981_44ce_84e0_6ee5d96ab0b6.slice/crio-conmon-6ef82bfb2166922493bd35625c7139aad0ca292c473c4fbe824f592069923330.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod114a8b03_2981_44ce_84e0_6ee5d96ab0b6.slice/crio-6ef82bfb2166922493bd35625c7139aad0ca292c473c4fbe824f592069923330.scope\": RecentStats: unable to find data in memory cache]" Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.015448 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-75c4c4f78-4f2pp" Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.164514 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/114a8b03-2981-44ce-84e0-6ee5d96ab0b6-config-data-custom\") pod \"114a8b03-2981-44ce-84e0-6ee5d96ab0b6\" (UID: \"114a8b03-2981-44ce-84e0-6ee5d96ab0b6\") " Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.164595 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/114a8b03-2981-44ce-84e0-6ee5d96ab0b6-combined-ca-bundle\") pod \"114a8b03-2981-44ce-84e0-6ee5d96ab0b6\" (UID: \"114a8b03-2981-44ce-84e0-6ee5d96ab0b6\") " Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.164651 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/114a8b03-2981-44ce-84e0-6ee5d96ab0b6-logs\") pod \"114a8b03-2981-44ce-84e0-6ee5d96ab0b6\" (UID: \"114a8b03-2981-44ce-84e0-6ee5d96ab0b6\") " Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.164757 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/114a8b03-2981-44ce-84e0-6ee5d96ab0b6-config-data\") pod \"114a8b03-2981-44ce-84e0-6ee5d96ab0b6\" (UID: \"114a8b03-2981-44ce-84e0-6ee5d96ab0b6\") " Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.164779 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zs9s6\" (UniqueName: \"kubernetes.io/projected/114a8b03-2981-44ce-84e0-6ee5d96ab0b6-kube-api-access-zs9s6\") pod \"114a8b03-2981-44ce-84e0-6ee5d96ab0b6\" (UID: \"114a8b03-2981-44ce-84e0-6ee5d96ab0b6\") " Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.165219 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/114a8b03-2981-44ce-84e0-6ee5d96ab0b6-logs" (OuterVolumeSpecName: "logs") pod "114a8b03-2981-44ce-84e0-6ee5d96ab0b6" (UID: "114a8b03-2981-44ce-84e0-6ee5d96ab0b6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.213986 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/114a8b03-2981-44ce-84e0-6ee5d96ab0b6-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "114a8b03-2981-44ce-84e0-6ee5d96ab0b6" (UID: "114a8b03-2981-44ce-84e0-6ee5d96ab0b6"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.214135 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/114a8b03-2981-44ce-84e0-6ee5d96ab0b6-kube-api-access-zs9s6" (OuterVolumeSpecName: "kube-api-access-zs9s6") pod "114a8b03-2981-44ce-84e0-6ee5d96ab0b6" (UID: "114a8b03-2981-44ce-84e0-6ee5d96ab0b6"). InnerVolumeSpecName "kube-api-access-zs9s6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.229745 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/114a8b03-2981-44ce-84e0-6ee5d96ab0b6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "114a8b03-2981-44ce-84e0-6ee5d96ab0b6" (UID: "114a8b03-2981-44ce-84e0-6ee5d96ab0b6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.254929 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/114a8b03-2981-44ce-84e0-6ee5d96ab0b6-config-data" (OuterVolumeSpecName: "config-data") pod "114a8b03-2981-44ce-84e0-6ee5d96ab0b6" (UID: "114a8b03-2981-44ce-84e0-6ee5d96ab0b6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.270797 4576 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/114a8b03-2981-44ce-84e0-6ee5d96ab0b6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.271009 4576 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/114a8b03-2981-44ce-84e0-6ee5d96ab0b6-logs\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.271141 4576 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/114a8b03-2981-44ce-84e0-6ee5d96ab0b6-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.271201 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zs9s6\" (UniqueName: \"kubernetes.io/projected/114a8b03-2981-44ce-84e0-6ee5d96ab0b6-kube-api-access-zs9s6\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.271257 4576 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/114a8b03-2981-44ce-84e0-6ee5d96ab0b6-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.280010 4576 generic.go:334] "Generic (PLEG): container finished" podID="9e725c45-22cc-4be1-8cdc-554f9af8a653" containerID="0832e015f2eadb9ceae7e56734f7e1c8011a7b01fed9eac59a662f40962a14cb" exitCode=0 Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.280079 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9e725c45-22cc-4be1-8cdc-554f9af8a653","Type":"ContainerDied","Data":"0832e015f2eadb9ceae7e56734f7e1c8011a7b01fed9eac59a662f40962a14cb"} Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.295650 4576 generic.go:334] "Generic (PLEG): container finished" podID="114a8b03-2981-44ce-84e0-6ee5d96ab0b6" containerID="6ef82bfb2166922493bd35625c7139aad0ca292c473c4fbe824f592069923330" exitCode=0 Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.296002 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-75c4c4f78-4f2pp" Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.296023 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-75c4c4f78-4f2pp" event={"ID":"114a8b03-2981-44ce-84e0-6ee5d96ab0b6","Type":"ContainerDied","Data":"6ef82bfb2166922493bd35625c7139aad0ca292c473c4fbe824f592069923330"} Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.296668 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-75c4c4f78-4f2pp" event={"ID":"114a8b03-2981-44ce-84e0-6ee5d96ab0b6","Type":"ContainerDied","Data":"ab194e0e46d9600808690c66a177b18f55fbeb7b27b84f9270841143d65c4098"} Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.296700 4576 scope.go:117] "RemoveContainer" containerID="6ef82bfb2166922493bd35625c7139aad0ca292c473c4fbe824f592069923330" Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.338455 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-75c4c4f78-4f2pp"] Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.345971 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-75c4c4f78-4f2pp"] Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.389677 4576 scope.go:117] "RemoveContainer" containerID="b026351891cac9c52ec724c71a84d718a7ad227787998ad0f35a6410a97dc695" Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.397825 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-679c878b58-l5t4t" podUID="c3348234-cbbe-464e-b7dd-493151ce96ef" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.164:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.447626 4576 scope.go:117] "RemoveContainer" containerID="6ef82bfb2166922493bd35625c7139aad0ca292c473c4fbe824f592069923330" Dec 03 09:13:28 crc kubenswrapper[4576]: E1203 09:13:28.449945 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ef82bfb2166922493bd35625c7139aad0ca292c473c4fbe824f592069923330\": container with ID starting with 6ef82bfb2166922493bd35625c7139aad0ca292c473c4fbe824f592069923330 not found: ID does not exist" containerID="6ef82bfb2166922493bd35625c7139aad0ca292c473c4fbe824f592069923330" Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.449971 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ef82bfb2166922493bd35625c7139aad0ca292c473c4fbe824f592069923330"} err="failed to get container status \"6ef82bfb2166922493bd35625c7139aad0ca292c473c4fbe824f592069923330\": rpc error: code = NotFound desc = could not find container \"6ef82bfb2166922493bd35625c7139aad0ca292c473c4fbe824f592069923330\": container with ID starting with 6ef82bfb2166922493bd35625c7139aad0ca292c473c4fbe824f592069923330 not found: ID does not exist" Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.449994 4576 scope.go:117] "RemoveContainer" containerID="b026351891cac9c52ec724c71a84d718a7ad227787998ad0f35a6410a97dc695" Dec 03 09:13:28 crc kubenswrapper[4576]: E1203 09:13:28.453999 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b026351891cac9c52ec724c71a84d718a7ad227787998ad0f35a6410a97dc695\": container with ID starting with b026351891cac9c52ec724c71a84d718a7ad227787998ad0f35a6410a97dc695 not 
found: ID does not exist" containerID="b026351891cac9c52ec724c71a84d718a7ad227787998ad0f35a6410a97dc695" Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.454049 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b026351891cac9c52ec724c71a84d718a7ad227787998ad0f35a6410a97dc695"} err="failed to get container status \"b026351891cac9c52ec724c71a84d718a7ad227787998ad0f35a6410a97dc695\": rpc error: code = NotFound desc = could not find container \"b026351891cac9c52ec724c71a84d718a7ad227787998ad0f35a6410a97dc695\": container with ID starting with b026351891cac9c52ec724c71a84d718a7ad227787998ad0f35a6410a97dc695 not found: ID does not exist" Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.711593 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.817997 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e725c45-22cc-4be1-8cdc-554f9af8a653-combined-ca-bundle\") pod \"9e725c45-22cc-4be1-8cdc-554f9af8a653\" (UID: \"9e725c45-22cc-4be1-8cdc-554f9af8a653\") " Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.818074 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e725c45-22cc-4be1-8cdc-554f9af8a653-scripts\") pod \"9e725c45-22cc-4be1-8cdc-554f9af8a653\" (UID: \"9e725c45-22cc-4be1-8cdc-554f9af8a653\") " Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.818837 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9e725c45-22cc-4be1-8cdc-554f9af8a653-config-data-custom\") pod \"9e725c45-22cc-4be1-8cdc-554f9af8a653\" (UID: \"9e725c45-22cc-4be1-8cdc-554f9af8a653\") " Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.818969 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9e725c45-22cc-4be1-8cdc-554f9af8a653-etc-machine-id\") pod \"9e725c45-22cc-4be1-8cdc-554f9af8a653\" (UID: \"9e725c45-22cc-4be1-8cdc-554f9af8a653\") " Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.819066 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-95cxm\" (UniqueName: \"kubernetes.io/projected/9e725c45-22cc-4be1-8cdc-554f9af8a653-kube-api-access-95cxm\") pod \"9e725c45-22cc-4be1-8cdc-554f9af8a653\" (UID: \"9e725c45-22cc-4be1-8cdc-554f9af8a653\") " Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.819107 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e725c45-22cc-4be1-8cdc-554f9af8a653-config-data\") pod \"9e725c45-22cc-4be1-8cdc-554f9af8a653\" (UID: \"9e725c45-22cc-4be1-8cdc-554f9af8a653\") " Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.820230 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9e725c45-22cc-4be1-8cdc-554f9af8a653-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "9e725c45-22cc-4be1-8cdc-554f9af8a653" (UID: "9e725c45-22cc-4be1-8cdc-554f9af8a653"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.826616 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e725c45-22cc-4be1-8cdc-554f9af8a653-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "9e725c45-22cc-4be1-8cdc-554f9af8a653" (UID: "9e725c45-22cc-4be1-8cdc-554f9af8a653"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.845684 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="23230cc9-ccdd-4985-a6e0-6b9542da25bc" containerName="cinder-api" probeResult="failure" output="Get \"http://10.217.0.163:8776/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.854692 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e725c45-22cc-4be1-8cdc-554f9af8a653-scripts" (OuterVolumeSpecName: "scripts") pod "9e725c45-22cc-4be1-8cdc-554f9af8a653" (UID: "9e725c45-22cc-4be1-8cdc-554f9af8a653"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.866825 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e725c45-22cc-4be1-8cdc-554f9af8a653-kube-api-access-95cxm" (OuterVolumeSpecName: "kube-api-access-95cxm") pod "9e725c45-22cc-4be1-8cdc-554f9af8a653" (UID: "9e725c45-22cc-4be1-8cdc-554f9af8a653"). InnerVolumeSpecName "kube-api-access-95cxm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.922637 4576 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9e725c45-22cc-4be1-8cdc-554f9af8a653-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.922682 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-95cxm\" (UniqueName: \"kubernetes.io/projected/9e725c45-22cc-4be1-8cdc-554f9af8a653-kube-api-access-95cxm\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.922693 4576 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e725c45-22cc-4be1-8cdc-554f9af8a653-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.922701 4576 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9e725c45-22cc-4be1-8cdc-554f9af8a653-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:28 crc kubenswrapper[4576]: I1203 09:13:28.997965 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e725c45-22cc-4be1-8cdc-554f9af8a653-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9e725c45-22cc-4be1-8cdc-554f9af8a653" (UID: "9e725c45-22cc-4be1-8cdc-554f9af8a653"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:13:29 crc kubenswrapper[4576]: I1203 09:13:29.024631 4576 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e725c45-22cc-4be1-8cdc-554f9af8a653-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:29 crc kubenswrapper[4576]: I1203 09:13:29.049733 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e725c45-22cc-4be1-8cdc-554f9af8a653-config-data" (OuterVolumeSpecName: "config-data") pod "9e725c45-22cc-4be1-8cdc-554f9af8a653" (UID: "9e725c45-22cc-4be1-8cdc-554f9af8a653"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:13:29 crc kubenswrapper[4576]: I1203 09:13:29.125946 4576 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e725c45-22cc-4be1-8cdc-554f9af8a653-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:29 crc kubenswrapper[4576]: I1203 09:13:29.310058 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9e725c45-22cc-4be1-8cdc-554f9af8a653","Type":"ContainerDied","Data":"a10bc41517e14fdbd619b4965f9bb13494f87f18acd29536fbf0ab6670e9eb7b"} Dec 03 09:13:29 crc kubenswrapper[4576]: I1203 09:13:29.310119 4576 scope.go:117] "RemoveContainer" containerID="6a236c29784a8be34119dc8020c880b30270a6fa8ee8f642103657e81af80278" Dec 03 09:13:29 crc kubenswrapper[4576]: I1203 09:13:29.310117 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 03 09:13:29 crc kubenswrapper[4576]: I1203 09:13:29.348642 4576 scope.go:117] "RemoveContainer" containerID="0832e015f2eadb9ceae7e56734f7e1c8011a7b01fed9eac59a662f40962a14cb" Dec 03 09:13:29 crc kubenswrapper[4576]: I1203 09:13:29.359597 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 09:13:29 crc kubenswrapper[4576]: I1203 09:13:29.369451 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 09:13:29 crc kubenswrapper[4576]: I1203 09:13:29.407908 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 09:13:29 crc kubenswrapper[4576]: E1203 09:13:29.408295 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="114a8b03-2981-44ce-84e0-6ee5d96ab0b6" containerName="barbican-api-log" Dec 03 09:13:29 crc kubenswrapper[4576]: I1203 09:13:29.408312 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="114a8b03-2981-44ce-84e0-6ee5d96ab0b6" containerName="barbican-api-log" Dec 03 09:13:29 crc kubenswrapper[4576]: E1203 09:13:29.408337 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e725c45-22cc-4be1-8cdc-554f9af8a653" containerName="probe" Dec 03 09:13:29 crc kubenswrapper[4576]: I1203 09:13:29.408344 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e725c45-22cc-4be1-8cdc-554f9af8a653" containerName="probe" Dec 03 09:13:29 crc kubenswrapper[4576]: E1203 09:13:29.408354 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="114a8b03-2981-44ce-84e0-6ee5d96ab0b6" containerName="barbican-api" Dec 03 09:13:29 crc kubenswrapper[4576]: I1203 09:13:29.408359 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="114a8b03-2981-44ce-84e0-6ee5d96ab0b6" containerName="barbican-api" Dec 03 09:13:29 crc kubenswrapper[4576]: E1203 09:13:29.408377 4576 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e725c45-22cc-4be1-8cdc-554f9af8a653" containerName="cinder-scheduler" Dec 03 09:13:29 crc kubenswrapper[4576]: I1203 09:13:29.408383 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e725c45-22cc-4be1-8cdc-554f9af8a653" containerName="cinder-scheduler" Dec 03 09:13:29 crc kubenswrapper[4576]: I1203 09:13:29.408543 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="114a8b03-2981-44ce-84e0-6ee5d96ab0b6" containerName="barbican-api-log" Dec 03 09:13:29 crc kubenswrapper[4576]: I1203 09:13:29.408558 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e725c45-22cc-4be1-8cdc-554f9af8a653" containerName="cinder-scheduler" Dec 03 09:13:29 crc kubenswrapper[4576]: I1203 09:13:29.408576 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e725c45-22cc-4be1-8cdc-554f9af8a653" containerName="probe" Dec 03 09:13:29 crc kubenswrapper[4576]: I1203 09:13:29.408591 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="114a8b03-2981-44ce-84e0-6ee5d96ab0b6" containerName="barbican-api" Dec 03 09:13:29 crc kubenswrapper[4576]: I1203 09:13:29.409441 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 03 09:13:29 crc kubenswrapper[4576]: I1203 09:13:29.413918 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 03 09:13:29 crc kubenswrapper[4576]: I1203 09:13:29.428351 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 09:13:29 crc kubenswrapper[4576]: I1203 09:13:29.431336 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/11c5d059-87c8-4fe8-ad1b-e50f1b029e8b-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"11c5d059-87c8-4fe8-ad1b-e50f1b029e8b\") " pod="openstack/cinder-scheduler-0" Dec 03 09:13:29 crc kubenswrapper[4576]: I1203 09:13:29.431380 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11c5d059-87c8-4fe8-ad1b-e50f1b029e8b-config-data\") pod \"cinder-scheduler-0\" (UID: \"11c5d059-87c8-4fe8-ad1b-e50f1b029e8b\") " pod="openstack/cinder-scheduler-0" Dec 03 09:13:29 crc kubenswrapper[4576]: I1203 09:13:29.431424 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/11c5d059-87c8-4fe8-ad1b-e50f1b029e8b-scripts\") pod \"cinder-scheduler-0\" (UID: \"11c5d059-87c8-4fe8-ad1b-e50f1b029e8b\") " pod="openstack/cinder-scheduler-0" Dec 03 09:13:29 crc kubenswrapper[4576]: I1203 09:13:29.431467 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/11c5d059-87c8-4fe8-ad1b-e50f1b029e8b-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"11c5d059-87c8-4fe8-ad1b-e50f1b029e8b\") " pod="openstack/cinder-scheduler-0" Dec 03 09:13:29 crc kubenswrapper[4576]: I1203 09:13:29.431484 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11c5d059-87c8-4fe8-ad1b-e50f1b029e8b-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"11c5d059-87c8-4fe8-ad1b-e50f1b029e8b\") " 
pod="openstack/cinder-scheduler-0" Dec 03 09:13:29 crc kubenswrapper[4576]: I1203 09:13:29.431538 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5hjl\" (UniqueName: \"kubernetes.io/projected/11c5d059-87c8-4fe8-ad1b-e50f1b029e8b-kube-api-access-k5hjl\") pod \"cinder-scheduler-0\" (UID: \"11c5d059-87c8-4fe8-ad1b-e50f1b029e8b\") " pod="openstack/cinder-scheduler-0" Dec 03 09:13:29 crc kubenswrapper[4576]: I1203 09:13:29.533564 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/11c5d059-87c8-4fe8-ad1b-e50f1b029e8b-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"11c5d059-87c8-4fe8-ad1b-e50f1b029e8b\") " pod="openstack/cinder-scheduler-0" Dec 03 09:13:29 crc kubenswrapper[4576]: I1203 09:13:29.533611 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11c5d059-87c8-4fe8-ad1b-e50f1b029e8b-config-data\") pod \"cinder-scheduler-0\" (UID: \"11c5d059-87c8-4fe8-ad1b-e50f1b029e8b\") " pod="openstack/cinder-scheduler-0" Dec 03 09:13:29 crc kubenswrapper[4576]: I1203 09:13:29.533653 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/11c5d059-87c8-4fe8-ad1b-e50f1b029e8b-scripts\") pod \"cinder-scheduler-0\" (UID: \"11c5d059-87c8-4fe8-ad1b-e50f1b029e8b\") " pod="openstack/cinder-scheduler-0" Dec 03 09:13:29 crc kubenswrapper[4576]: I1203 09:13:29.533700 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/11c5d059-87c8-4fe8-ad1b-e50f1b029e8b-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"11c5d059-87c8-4fe8-ad1b-e50f1b029e8b\") " pod="openstack/cinder-scheduler-0" Dec 03 09:13:29 crc kubenswrapper[4576]: I1203 09:13:29.533715 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11c5d059-87c8-4fe8-ad1b-e50f1b029e8b-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"11c5d059-87c8-4fe8-ad1b-e50f1b029e8b\") " pod="openstack/cinder-scheduler-0" Dec 03 09:13:29 crc kubenswrapper[4576]: I1203 09:13:29.533755 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5hjl\" (UniqueName: \"kubernetes.io/projected/11c5d059-87c8-4fe8-ad1b-e50f1b029e8b-kube-api-access-k5hjl\") pod \"cinder-scheduler-0\" (UID: \"11c5d059-87c8-4fe8-ad1b-e50f1b029e8b\") " pod="openstack/cinder-scheduler-0" Dec 03 09:13:29 crc kubenswrapper[4576]: I1203 09:13:29.534051 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/11c5d059-87c8-4fe8-ad1b-e50f1b029e8b-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"11c5d059-87c8-4fe8-ad1b-e50f1b029e8b\") " pod="openstack/cinder-scheduler-0" Dec 03 09:13:29 crc kubenswrapper[4576]: I1203 09:13:29.538732 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/11c5d059-87c8-4fe8-ad1b-e50f1b029e8b-scripts\") pod \"cinder-scheduler-0\" (UID: \"11c5d059-87c8-4fe8-ad1b-e50f1b029e8b\") " pod="openstack/cinder-scheduler-0" Dec 03 09:13:29 crc kubenswrapper[4576]: I1203 09:13:29.540472 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/11c5d059-87c8-4fe8-ad1b-e50f1b029e8b-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"11c5d059-87c8-4fe8-ad1b-e50f1b029e8b\") " pod="openstack/cinder-scheduler-0" Dec 03 09:13:29 crc kubenswrapper[4576]: I1203 09:13:29.541981 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/11c5d059-87c8-4fe8-ad1b-e50f1b029e8b-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"11c5d059-87c8-4fe8-ad1b-e50f1b029e8b\") " pod="openstack/cinder-scheduler-0" Dec 03 09:13:29 crc kubenswrapper[4576]: I1203 09:13:29.546518 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11c5d059-87c8-4fe8-ad1b-e50f1b029e8b-config-data\") pod \"cinder-scheduler-0\" (UID: \"11c5d059-87c8-4fe8-ad1b-e50f1b029e8b\") " pod="openstack/cinder-scheduler-0" Dec 03 09:13:29 crc kubenswrapper[4576]: I1203 09:13:29.565338 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5hjl\" (UniqueName: \"kubernetes.io/projected/11c5d059-87c8-4fe8-ad1b-e50f1b029e8b-kube-api-access-k5hjl\") pod \"cinder-scheduler-0\" (UID: \"11c5d059-87c8-4fe8-ad1b-e50f1b029e8b\") " pod="openstack/cinder-scheduler-0" Dec 03 09:13:29 crc kubenswrapper[4576]: I1203 09:13:29.698285 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="114a8b03-2981-44ce-84e0-6ee5d96ab0b6" path="/var/lib/kubelet/pods/114a8b03-2981-44ce-84e0-6ee5d96ab0b6/volumes" Dec 03 09:13:29 crc kubenswrapper[4576]: I1203 09:13:29.699092 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e725c45-22cc-4be1-8cdc-554f9af8a653" path="/var/lib/kubelet/pods/9e725c45-22cc-4be1-8cdc-554f9af8a653/volumes" Dec 03 09:13:29 crc kubenswrapper[4576]: I1203 09:13:29.727650 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 03 09:13:30 crc kubenswrapper[4576]: I1203 09:13:30.396018 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 09:13:30 crc kubenswrapper[4576]: W1203 09:13:30.404072 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod11c5d059_87c8_4fe8_ad1b_e50f1b029e8b.slice/crio-e5f7d30c1fcb7739ebd3f7f66c584e8c77a00628e9e59fc75e884cbb1f94c9dd WatchSource:0}: Error finding container e5f7d30c1fcb7739ebd3f7f66c584e8c77a00628e9e59fc75e884cbb1f94c9dd: Status 404 returned error can't find the container with id e5f7d30c1fcb7739ebd3f7f66c584e8c77a00628e9e59fc75e884cbb1f94c9dd Dec 03 09:13:31 crc kubenswrapper[4576]: I1203 09:13:31.363242 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"11c5d059-87c8-4fe8-ad1b-e50f1b029e8b","Type":"ContainerStarted","Data":"e5f7d30c1fcb7739ebd3f7f66c584e8c77a00628e9e59fc75e884cbb1f94c9dd"} Dec 03 09:13:32 crc kubenswrapper[4576]: I1203 09:13:32.394642 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"11c5d059-87c8-4fe8-ad1b-e50f1b029e8b","Type":"ContainerStarted","Data":"9b3aa02816166692ae76d741112cd8a24c0fe6b3fb8649b0c0fc45f0c89f6439"} Dec 03 09:13:32 crc kubenswrapper[4576]: I1203 09:13:32.400235 4576 generic.go:334] "Generic (PLEG): container finished" podID="83ab6db2-7b9e-4161-a064-56fe67986825" containerID="ecc1d0da0e23f836b4057d594436f8f67b6ca64b352fcb8fb4eef6e69fd70084" exitCode=137 Dec 03 09:13:32 crc kubenswrapper[4576]: I1203 09:13:32.400291 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5d9b9454d4-cbqlk" event={"ID":"83ab6db2-7b9e-4161-a064-56fe67986825","Type":"ContainerDied","Data":"ecc1d0da0e23f836b4057d594436f8f67b6ca64b352fcb8fb4eef6e69fd70084"} Dec 03 09:13:32 crc kubenswrapper[4576]: I1203 09:13:32.406668 4576 generic.go:334] "Generic (PLEG): container finished" podID="288ed488-5270-4966-b866-f9f015262989" containerID="bfc5b586bd45522f00431a1d74aa16840b99866538d2111ba820658948c181a3" exitCode=137 Dec 03 09:13:32 crc kubenswrapper[4576]: I1203 09:13:32.406736 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6d649695d8-6rtxn" event={"ID":"288ed488-5270-4966-b866-f9f015262989","Type":"ContainerDied","Data":"bfc5b586bd45522f00431a1d74aa16840b99866538d2111ba820658948c181a3"} Dec 03 09:13:32 crc kubenswrapper[4576]: I1203 09:13:32.406768 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6d649695d8-6rtxn" event={"ID":"288ed488-5270-4966-b866-f9f015262989","Type":"ContainerStarted","Data":"8ec765ef7367e32a35871bbf668105eda818349406fff655790b4c87374c9def"} Dec 03 09:13:33 crc kubenswrapper[4576]: I1203 09:13:33.300061 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 03 09:13:33 crc kubenswrapper[4576]: I1203 09:13:33.445586 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5d9b9454d4-cbqlk" event={"ID":"83ab6db2-7b9e-4161-a064-56fe67986825","Type":"ContainerStarted","Data":"6656ae443ae4c2e89e4664ef221d9536d0cd3bd54b0704ab14a5be9f2234e78e"} Dec 03 09:13:33 crc kubenswrapper[4576]: I1203 09:13:33.461864 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" 
event={"ID":"11c5d059-87c8-4fe8-ad1b-e50f1b029e8b","Type":"ContainerStarted","Data":"00ec8618f931f37289e084299a6602a7c6963b2165d7fe0897b52b40e9c911a9"} Dec 03 09:13:33 crc kubenswrapper[4576]: I1203 09:13:33.482022 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.482002271 podStartE2EDuration="4.482002271s" podCreationTimestamp="2025-12-03 09:13:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:13:33.480581742 +0000 UTC m=+2020.866558726" watchObservedRunningTime="2025-12-03 09:13:33.482002271 +0000 UTC m=+2020.867979255" Dec 03 09:13:34 crc kubenswrapper[4576]: I1203 09:13:34.728689 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 03 09:13:36 crc kubenswrapper[4576]: I1203 09:13:36.233013 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:13:36 crc kubenswrapper[4576]: I1203 09:13:36.233661 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="22680aad-860c-4aba-8796-ff097168686e" containerName="ceilometer-central-agent" containerID="cri-o://911097f0a9e8cada88ce210c8b6e3adebca482981d855dfbb11e27d113f3b59e" gracePeriod=30 Dec 03 09:13:36 crc kubenswrapper[4576]: I1203 09:13:36.233680 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="22680aad-860c-4aba-8796-ff097168686e" containerName="proxy-httpd" containerID="cri-o://84fad1d87e8198d81331d0ccad98dbeb679c5449a353d8c8fe08dbe809622c04" gracePeriod=30 Dec 03 09:13:36 crc kubenswrapper[4576]: I1203 09:13:36.233771 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="22680aad-860c-4aba-8796-ff097168686e" containerName="sg-core" containerID="cri-o://80c57d3c0ad3fbb0e6afa522538c05ba7514a3151fa069b74598f692e556f6fb" gracePeriod=30 Dec 03 09:13:36 crc kubenswrapper[4576]: I1203 09:13:36.233847 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="22680aad-860c-4aba-8796-ff097168686e" containerName="ceilometer-notification-agent" containerID="cri-o://b4b98a8d6c628e682631dfcc383328641fe0d3cc133f9eeb49129680a5ce7511" gracePeriod=30 Dec 03 09:13:36 crc kubenswrapper[4576]: I1203 09:13:36.260212 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="22680aad-860c-4aba-8796-ff097168686e" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.165:3000/\": EOF" Dec 03 09:13:36 crc kubenswrapper[4576]: I1203 09:13:36.515095 4576 generic.go:334] "Generic (PLEG): container finished" podID="22680aad-860c-4aba-8796-ff097168686e" containerID="84fad1d87e8198d81331d0ccad98dbeb679c5449a353d8c8fe08dbe809622c04" exitCode=0 Dec 03 09:13:36 crc kubenswrapper[4576]: I1203 09:13:36.515154 4576 generic.go:334] "Generic (PLEG): container finished" podID="22680aad-860c-4aba-8796-ff097168686e" containerID="80c57d3c0ad3fbb0e6afa522538c05ba7514a3151fa069b74598f692e556f6fb" exitCode=2 Dec 03 09:13:36 crc kubenswrapper[4576]: I1203 09:13:36.515175 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22680aad-860c-4aba-8796-ff097168686e","Type":"ContainerDied","Data":"84fad1d87e8198d81331d0ccad98dbeb679c5449a353d8c8fe08dbe809622c04"} Dec 03 09:13:36 crc 
kubenswrapper[4576]: I1203 09:13:36.515200 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22680aad-860c-4aba-8796-ff097168686e","Type":"ContainerDied","Data":"80c57d3c0ad3fbb0e6afa522538c05ba7514a3151fa069b74598f692e556f6fb"} Dec 03 09:13:37 crc kubenswrapper[4576]: I1203 09:13:37.272563 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-b677c5dc5-pfc4n"] Dec 03 09:13:37 crc kubenswrapper[4576]: I1203 09:13:37.277665 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-b677c5dc5-pfc4n" Dec 03 09:13:37 crc kubenswrapper[4576]: I1203 09:13:37.284734 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Dec 03 09:13:37 crc kubenswrapper[4576]: I1203 09:13:37.295364 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Dec 03 09:13:37 crc kubenswrapper[4576]: I1203 09:13:37.297910 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 03 09:13:37 crc kubenswrapper[4576]: I1203 09:13:37.303626 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-b677c5dc5-pfc4n"] Dec 03 09:13:37 crc kubenswrapper[4576]: I1203 09:13:37.389448 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6bf8d1cf-0003-4e48-89f5-7ae1698f27ff-internal-tls-certs\") pod \"swift-proxy-b677c5dc5-pfc4n\" (UID: \"6bf8d1cf-0003-4e48-89f5-7ae1698f27ff\") " pod="openstack/swift-proxy-b677c5dc5-pfc4n" Dec 03 09:13:37 crc kubenswrapper[4576]: I1203 09:13:37.389683 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6bf8d1cf-0003-4e48-89f5-7ae1698f27ff-log-httpd\") pod \"swift-proxy-b677c5dc5-pfc4n\" (UID: \"6bf8d1cf-0003-4e48-89f5-7ae1698f27ff\") " pod="openstack/swift-proxy-b677c5dc5-pfc4n" Dec 03 09:13:37 crc kubenswrapper[4576]: I1203 09:13:37.389819 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6bf8d1cf-0003-4e48-89f5-7ae1698f27ff-etc-swift\") pod \"swift-proxy-b677c5dc5-pfc4n\" (UID: \"6bf8d1cf-0003-4e48-89f5-7ae1698f27ff\") " pod="openstack/swift-proxy-b677c5dc5-pfc4n" Dec 03 09:13:37 crc kubenswrapper[4576]: I1203 09:13:37.389866 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-csksg\" (UniqueName: \"kubernetes.io/projected/6bf8d1cf-0003-4e48-89f5-7ae1698f27ff-kube-api-access-csksg\") pod \"swift-proxy-b677c5dc5-pfc4n\" (UID: \"6bf8d1cf-0003-4e48-89f5-7ae1698f27ff\") " pod="openstack/swift-proxy-b677c5dc5-pfc4n" Dec 03 09:13:37 crc kubenswrapper[4576]: I1203 09:13:37.389908 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6bf8d1cf-0003-4e48-89f5-7ae1698f27ff-config-data\") pod \"swift-proxy-b677c5dc5-pfc4n\" (UID: \"6bf8d1cf-0003-4e48-89f5-7ae1698f27ff\") " pod="openstack/swift-proxy-b677c5dc5-pfc4n" Dec 03 09:13:37 crc kubenswrapper[4576]: I1203 09:13:37.390003 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/6bf8d1cf-0003-4e48-89f5-7ae1698f27ff-combined-ca-bundle\") pod \"swift-proxy-b677c5dc5-pfc4n\" (UID: \"6bf8d1cf-0003-4e48-89f5-7ae1698f27ff\") " pod="openstack/swift-proxy-b677c5dc5-pfc4n" Dec 03 09:13:37 crc kubenswrapper[4576]: I1203 09:13:37.390118 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6bf8d1cf-0003-4e48-89f5-7ae1698f27ff-run-httpd\") pod \"swift-proxy-b677c5dc5-pfc4n\" (UID: \"6bf8d1cf-0003-4e48-89f5-7ae1698f27ff\") " pod="openstack/swift-proxy-b677c5dc5-pfc4n" Dec 03 09:13:37 crc kubenswrapper[4576]: I1203 09:13:37.390200 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6bf8d1cf-0003-4e48-89f5-7ae1698f27ff-public-tls-certs\") pod \"swift-proxy-b677c5dc5-pfc4n\" (UID: \"6bf8d1cf-0003-4e48-89f5-7ae1698f27ff\") " pod="openstack/swift-proxy-b677c5dc5-pfc4n" Dec 03 09:13:37 crc kubenswrapper[4576]: I1203 09:13:37.492325 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6bf8d1cf-0003-4e48-89f5-7ae1698f27ff-internal-tls-certs\") pod \"swift-proxy-b677c5dc5-pfc4n\" (UID: \"6bf8d1cf-0003-4e48-89f5-7ae1698f27ff\") " pod="openstack/swift-proxy-b677c5dc5-pfc4n" Dec 03 09:13:37 crc kubenswrapper[4576]: I1203 09:13:37.492443 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6bf8d1cf-0003-4e48-89f5-7ae1698f27ff-log-httpd\") pod \"swift-proxy-b677c5dc5-pfc4n\" (UID: \"6bf8d1cf-0003-4e48-89f5-7ae1698f27ff\") " pod="openstack/swift-proxy-b677c5dc5-pfc4n" Dec 03 09:13:37 crc kubenswrapper[4576]: I1203 09:13:37.492496 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6bf8d1cf-0003-4e48-89f5-7ae1698f27ff-etc-swift\") pod \"swift-proxy-b677c5dc5-pfc4n\" (UID: \"6bf8d1cf-0003-4e48-89f5-7ae1698f27ff\") " pod="openstack/swift-proxy-b677c5dc5-pfc4n" Dec 03 09:13:37 crc kubenswrapper[4576]: I1203 09:13:37.492555 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-csksg\" (UniqueName: \"kubernetes.io/projected/6bf8d1cf-0003-4e48-89f5-7ae1698f27ff-kube-api-access-csksg\") pod \"swift-proxy-b677c5dc5-pfc4n\" (UID: \"6bf8d1cf-0003-4e48-89f5-7ae1698f27ff\") " pod="openstack/swift-proxy-b677c5dc5-pfc4n" Dec 03 09:13:37 crc kubenswrapper[4576]: I1203 09:13:37.492589 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6bf8d1cf-0003-4e48-89f5-7ae1698f27ff-config-data\") pod \"swift-proxy-b677c5dc5-pfc4n\" (UID: \"6bf8d1cf-0003-4e48-89f5-7ae1698f27ff\") " pod="openstack/swift-proxy-b677c5dc5-pfc4n" Dec 03 09:13:37 crc kubenswrapper[4576]: I1203 09:13:37.492624 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6bf8d1cf-0003-4e48-89f5-7ae1698f27ff-combined-ca-bundle\") pod \"swift-proxy-b677c5dc5-pfc4n\" (UID: \"6bf8d1cf-0003-4e48-89f5-7ae1698f27ff\") " pod="openstack/swift-proxy-b677c5dc5-pfc4n" Dec 03 09:13:37 crc kubenswrapper[4576]: I1203 09:13:37.492668 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/6bf8d1cf-0003-4e48-89f5-7ae1698f27ff-run-httpd\") pod \"swift-proxy-b677c5dc5-pfc4n\" (UID: \"6bf8d1cf-0003-4e48-89f5-7ae1698f27ff\") " pod="openstack/swift-proxy-b677c5dc5-pfc4n" Dec 03 09:13:37 crc kubenswrapper[4576]: I1203 09:13:37.492713 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6bf8d1cf-0003-4e48-89f5-7ae1698f27ff-public-tls-certs\") pod \"swift-proxy-b677c5dc5-pfc4n\" (UID: \"6bf8d1cf-0003-4e48-89f5-7ae1698f27ff\") " pod="openstack/swift-proxy-b677c5dc5-pfc4n" Dec 03 09:13:37 crc kubenswrapper[4576]: I1203 09:13:37.496510 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6bf8d1cf-0003-4e48-89f5-7ae1698f27ff-run-httpd\") pod \"swift-proxy-b677c5dc5-pfc4n\" (UID: \"6bf8d1cf-0003-4e48-89f5-7ae1698f27ff\") " pod="openstack/swift-proxy-b677c5dc5-pfc4n" Dec 03 09:13:37 crc kubenswrapper[4576]: I1203 09:13:37.496659 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6bf8d1cf-0003-4e48-89f5-7ae1698f27ff-log-httpd\") pod \"swift-proxy-b677c5dc5-pfc4n\" (UID: \"6bf8d1cf-0003-4e48-89f5-7ae1698f27ff\") " pod="openstack/swift-proxy-b677c5dc5-pfc4n" Dec 03 09:13:37 crc kubenswrapper[4576]: I1203 09:13:37.507097 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6bf8d1cf-0003-4e48-89f5-7ae1698f27ff-etc-swift\") pod \"swift-proxy-b677c5dc5-pfc4n\" (UID: \"6bf8d1cf-0003-4e48-89f5-7ae1698f27ff\") " pod="openstack/swift-proxy-b677c5dc5-pfc4n" Dec 03 09:13:37 crc kubenswrapper[4576]: I1203 09:13:37.510300 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6bf8d1cf-0003-4e48-89f5-7ae1698f27ff-internal-tls-certs\") pod \"swift-proxy-b677c5dc5-pfc4n\" (UID: \"6bf8d1cf-0003-4e48-89f5-7ae1698f27ff\") " pod="openstack/swift-proxy-b677c5dc5-pfc4n" Dec 03 09:13:37 crc kubenswrapper[4576]: I1203 09:13:37.511105 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6bf8d1cf-0003-4e48-89f5-7ae1698f27ff-config-data\") pod \"swift-proxy-b677c5dc5-pfc4n\" (UID: \"6bf8d1cf-0003-4e48-89f5-7ae1698f27ff\") " pod="openstack/swift-proxy-b677c5dc5-pfc4n" Dec 03 09:13:37 crc kubenswrapper[4576]: I1203 09:13:37.516769 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6bf8d1cf-0003-4e48-89f5-7ae1698f27ff-public-tls-certs\") pod \"swift-proxy-b677c5dc5-pfc4n\" (UID: \"6bf8d1cf-0003-4e48-89f5-7ae1698f27ff\") " pod="openstack/swift-proxy-b677c5dc5-pfc4n" Dec 03 09:13:37 crc kubenswrapper[4576]: I1203 09:13:37.523291 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6bf8d1cf-0003-4e48-89f5-7ae1698f27ff-combined-ca-bundle\") pod \"swift-proxy-b677c5dc5-pfc4n\" (UID: \"6bf8d1cf-0003-4e48-89f5-7ae1698f27ff\") " pod="openstack/swift-proxy-b677c5dc5-pfc4n" Dec 03 09:13:37 crc kubenswrapper[4576]: I1203 09:13:37.544955 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-csksg\" (UniqueName: \"kubernetes.io/projected/6bf8d1cf-0003-4e48-89f5-7ae1698f27ff-kube-api-access-csksg\") pod \"swift-proxy-b677c5dc5-pfc4n\" (UID: 
\"6bf8d1cf-0003-4e48-89f5-7ae1698f27ff\") " pod="openstack/swift-proxy-b677c5dc5-pfc4n" Dec 03 09:13:37 crc kubenswrapper[4576]: I1203 09:13:37.564272 4576 generic.go:334] "Generic (PLEG): container finished" podID="22680aad-860c-4aba-8796-ff097168686e" containerID="911097f0a9e8cada88ce210c8b6e3adebca482981d855dfbb11e27d113f3b59e" exitCode=0 Dec 03 09:13:37 crc kubenswrapper[4576]: I1203 09:13:37.564343 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22680aad-860c-4aba-8796-ff097168686e","Type":"ContainerDied","Data":"911097f0a9e8cada88ce210c8b6e3adebca482981d855dfbb11e27d113f3b59e"} Dec 03 09:13:37 crc kubenswrapper[4576]: I1203 09:13:37.597338 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-b677c5dc5-pfc4n" Dec 03 09:13:38 crc kubenswrapper[4576]: I1203 09:13:38.649829 4576 generic.go:334] "Generic (PLEG): container finished" podID="22680aad-860c-4aba-8796-ff097168686e" containerID="b4b98a8d6c628e682631dfcc383328641fe0d3cc133f9eeb49129680a5ce7511" exitCode=0 Dec 03 09:13:38 crc kubenswrapper[4576]: I1203 09:13:38.650101 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22680aad-860c-4aba-8796-ff097168686e","Type":"ContainerDied","Data":"b4b98a8d6c628e682631dfcc383328641fe0d3cc133f9eeb49129680a5ce7511"} Dec 03 09:13:39 crc kubenswrapper[4576]: I1203 09:13:39.681512 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 09:13:39 crc kubenswrapper[4576]: I1203 09:13:39.682037 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 09:13:40 crc kubenswrapper[4576]: I1203 09:13:40.513217 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 03 09:13:41 crc kubenswrapper[4576]: I1203 09:13:41.558398 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="22680aad-860c-4aba-8796-ff097168686e" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.165:3000/\": dial tcp 10.217.0.165:3000: connect: connection refused" Dec 03 09:13:41 crc kubenswrapper[4576]: I1203 09:13:41.745665 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-5d9b9454d4-cbqlk" Dec 03 09:13:41 crc kubenswrapper[4576]: I1203 09:13:41.746291 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-5d9b9454d4-cbqlk" Dec 03 09:13:41 crc kubenswrapper[4576]: I1203 09:13:41.908494 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-6d649695d8-6rtxn" Dec 03 09:13:41 crc kubenswrapper[4576]: I1203 09:13:41.908558 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-6d649695d8-6rtxn" Dec 03 09:13:42 crc kubenswrapper[4576]: I1203 09:13:42.749363 4576 generic.go:334] "Generic (PLEG): container finished" podID="23230cc9-ccdd-4985-a6e0-6b9542da25bc" 
containerID="9bed9b661351fcb0057b958916fa4d6aaadf7fc0e31079310fd45d3d1c0b6ffa" exitCode=137 Dec 03 09:13:42 crc kubenswrapper[4576]: I1203 09:13:42.749743 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"23230cc9-ccdd-4985-a6e0-6b9542da25bc","Type":"ContainerDied","Data":"9bed9b661351fcb0057b958916fa4d6aaadf7fc0e31079310fd45d3d1c0b6ffa"} Dec 03 09:13:43 crc kubenswrapper[4576]: I1203 09:13:43.700409 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="23230cc9-ccdd-4985-a6e0-6b9542da25bc" containerName="cinder-api" probeResult="failure" output="Get \"http://10.217.0.163:8776/healthcheck\": dial tcp 10.217.0.163:8776: connect: connection refused" Dec 03 09:13:46 crc kubenswrapper[4576]: E1203 09:13:46.234901 4576 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified" Dec 03 09:13:46 crc kubenswrapper[4576]: E1203 09:13:46.235628 4576 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:openstackclient,Image:quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified,Command:[/bin/sleep],Args:[infinity],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nc7h94h59bh696h6ch57bh67h578hc7h665hc7hfbhdfh699h5d9h688h5f9h565h698h76hcbh65dh5b5hd7h96h6h57dh5cdh548h87h59h545q,ValueFrom:nil,},EnvVar{Name:OS_CLOUD,Value:default,ValueFrom:nil,},EnvVar{Name:PROMETHEUS_HOST,Value:metric-storage-prometheus.openstack.svc,ValueFrom:nil,},EnvVar{Name:PROMETHEUS_PORT,Value:9090,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:openstack-config,ReadOnly:false,MountPath:/home/cloud-admin/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/home/cloud-admin/.config/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/home/cloud-admin/cloudrc,SubPath:cloudrc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-69fbh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42401,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:*42401,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstackclient_openstack(649a142a-4649-45fb-bdba-11fcc838bf97): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" 
logger="UnhandledError" Dec 03 09:13:46 crc kubenswrapper[4576]: E1203 09:13:46.237165 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"openstackclient\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstackclient" podUID="649a142a-4649-45fb-bdba-11fcc838bf97" Dec 03 09:13:46 crc kubenswrapper[4576]: I1203 09:13:46.834475 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22680aad-860c-4aba-8796-ff097168686e","Type":"ContainerDied","Data":"e8a131d9ecaec9e78ab30042925de4c2e360926b9a0b7e5db54a79fdbe43903e"} Dec 03 09:13:46 crc kubenswrapper[4576]: I1203 09:13:46.834762 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e8a131d9ecaec9e78ab30042925de4c2e360926b9a0b7e5db54a79fdbe43903e" Dec 03 09:13:46 crc kubenswrapper[4576]: E1203 09:13:46.835597 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"openstackclient\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified\\\"\"" pod="openstack/openstackclient" podUID="649a142a-4649-45fb-bdba-11fcc838bf97" Dec 03 09:13:46 crc kubenswrapper[4576]: I1203 09:13:46.845208 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 09:13:46 crc kubenswrapper[4576]: I1203 09:13:46.940362 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22680aad-860c-4aba-8796-ff097168686e-log-httpd\") pod \"22680aad-860c-4aba-8796-ff097168686e\" (UID: \"22680aad-860c-4aba-8796-ff097168686e\") " Dec 03 09:13:46 crc kubenswrapper[4576]: I1203 09:13:46.940420 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-67sfp\" (UniqueName: \"kubernetes.io/projected/22680aad-860c-4aba-8796-ff097168686e-kube-api-access-67sfp\") pod \"22680aad-860c-4aba-8796-ff097168686e\" (UID: \"22680aad-860c-4aba-8796-ff097168686e\") " Dec 03 09:13:46 crc kubenswrapper[4576]: I1203 09:13:46.940494 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22680aad-860c-4aba-8796-ff097168686e-combined-ca-bundle\") pod \"22680aad-860c-4aba-8796-ff097168686e\" (UID: \"22680aad-860c-4aba-8796-ff097168686e\") " Dec 03 09:13:46 crc kubenswrapper[4576]: I1203 09:13:46.940589 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22680aad-860c-4aba-8796-ff097168686e-config-data\") pod \"22680aad-860c-4aba-8796-ff097168686e\" (UID: \"22680aad-860c-4aba-8796-ff097168686e\") " Dec 03 09:13:46 crc kubenswrapper[4576]: I1203 09:13:46.940614 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22680aad-860c-4aba-8796-ff097168686e-run-httpd\") pod \"22680aad-860c-4aba-8796-ff097168686e\" (UID: \"22680aad-860c-4aba-8796-ff097168686e\") " Dec 03 09:13:46 crc kubenswrapper[4576]: I1203 09:13:46.940641 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22680aad-860c-4aba-8796-ff097168686e-scripts\") pod \"22680aad-860c-4aba-8796-ff097168686e\" (UID: \"22680aad-860c-4aba-8796-ff097168686e\") " Dec 03 
09:13:46 crc kubenswrapper[4576]: I1203 09:13:46.940659 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/22680aad-860c-4aba-8796-ff097168686e-sg-core-conf-yaml\") pod \"22680aad-860c-4aba-8796-ff097168686e\" (UID: \"22680aad-860c-4aba-8796-ff097168686e\") " Dec 03 09:13:46 crc kubenswrapper[4576]: I1203 09:13:46.943602 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/22680aad-860c-4aba-8796-ff097168686e-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "22680aad-860c-4aba-8796-ff097168686e" (UID: "22680aad-860c-4aba-8796-ff097168686e"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:13:46 crc kubenswrapper[4576]: I1203 09:13:46.947050 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/22680aad-860c-4aba-8796-ff097168686e-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "22680aad-860c-4aba-8796-ff097168686e" (UID: "22680aad-860c-4aba-8796-ff097168686e"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:13:46 crc kubenswrapper[4576]: I1203 09:13:46.952723 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22680aad-860c-4aba-8796-ff097168686e-kube-api-access-67sfp" (OuterVolumeSpecName: "kube-api-access-67sfp") pod "22680aad-860c-4aba-8796-ff097168686e" (UID: "22680aad-860c-4aba-8796-ff097168686e"). InnerVolumeSpecName "kube-api-access-67sfp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:13:46 crc kubenswrapper[4576]: I1203 09:13:46.952904 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22680aad-860c-4aba-8796-ff097168686e-scripts" (OuterVolumeSpecName: "scripts") pod "22680aad-860c-4aba-8796-ff097168686e" (UID: "22680aad-860c-4aba-8796-ff097168686e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:13:46 crc kubenswrapper[4576]: I1203 09:13:46.992852 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22680aad-860c-4aba-8796-ff097168686e-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "22680aad-860c-4aba-8796-ff097168686e" (UID: "22680aad-860c-4aba-8796-ff097168686e"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.044440 4576 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22680aad-860c-4aba-8796-ff097168686e-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.044807 4576 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/22680aad-860c-4aba-8796-ff097168686e-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.044822 4576 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22680aad-860c-4aba-8796-ff097168686e-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.044834 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-67sfp\" (UniqueName: \"kubernetes.io/projected/22680aad-860c-4aba-8796-ff097168686e-kube-api-access-67sfp\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.044847 4576 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22680aad-860c-4aba-8796-ff097168686e-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.070380 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.117013 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22680aad-860c-4aba-8796-ff097168686e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "22680aad-860c-4aba-8796-ff097168686e" (UID: "22680aad-860c-4aba-8796-ff097168686e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.146408 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/23230cc9-ccdd-4985-a6e0-6b9542da25bc-config-data-custom\") pod \"23230cc9-ccdd-4985-a6e0-6b9542da25bc\" (UID: \"23230cc9-ccdd-4985-a6e0-6b9542da25bc\") " Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.146501 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23230cc9-ccdd-4985-a6e0-6b9542da25bc-config-data\") pod \"23230cc9-ccdd-4985-a6e0-6b9542da25bc\" (UID: \"23230cc9-ccdd-4985-a6e0-6b9542da25bc\") " Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.146640 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-97226\" (UniqueName: \"kubernetes.io/projected/23230cc9-ccdd-4985-a6e0-6b9542da25bc-kube-api-access-97226\") pod \"23230cc9-ccdd-4985-a6e0-6b9542da25bc\" (UID: \"23230cc9-ccdd-4985-a6e0-6b9542da25bc\") " Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.146713 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23230cc9-ccdd-4985-a6e0-6b9542da25bc-combined-ca-bundle\") pod \"23230cc9-ccdd-4985-a6e0-6b9542da25bc\" (UID: \"23230cc9-ccdd-4985-a6e0-6b9542da25bc\") " Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.146768 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23230cc9-ccdd-4985-a6e0-6b9542da25bc-scripts\") pod \"23230cc9-ccdd-4985-a6e0-6b9542da25bc\" (UID: \"23230cc9-ccdd-4985-a6e0-6b9542da25bc\") " Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.146824 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/23230cc9-ccdd-4985-a6e0-6b9542da25bc-logs\") pod \"23230cc9-ccdd-4985-a6e0-6b9542da25bc\" (UID: \"23230cc9-ccdd-4985-a6e0-6b9542da25bc\") " Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.146950 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/23230cc9-ccdd-4985-a6e0-6b9542da25bc-etc-machine-id\") pod \"23230cc9-ccdd-4985-a6e0-6b9542da25bc\" (UID: \"23230cc9-ccdd-4985-a6e0-6b9542da25bc\") " Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.147363 4576 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22680aad-860c-4aba-8796-ff097168686e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.147421 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/23230cc9-ccdd-4985-a6e0-6b9542da25bc-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "23230cc9-ccdd-4985-a6e0-6b9542da25bc" (UID: "23230cc9-ccdd-4985-a6e0-6b9542da25bc"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.147798 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23230cc9-ccdd-4985-a6e0-6b9542da25bc-logs" (OuterVolumeSpecName: "logs") pod "23230cc9-ccdd-4985-a6e0-6b9542da25bc" (UID: "23230cc9-ccdd-4985-a6e0-6b9542da25bc"). 
InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.156994 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23230cc9-ccdd-4985-a6e0-6b9542da25bc-scripts" (OuterVolumeSpecName: "scripts") pod "23230cc9-ccdd-4985-a6e0-6b9542da25bc" (UID: "23230cc9-ccdd-4985-a6e0-6b9542da25bc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.157243 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23230cc9-ccdd-4985-a6e0-6b9542da25bc-kube-api-access-97226" (OuterVolumeSpecName: "kube-api-access-97226") pod "23230cc9-ccdd-4985-a6e0-6b9542da25bc" (UID: "23230cc9-ccdd-4985-a6e0-6b9542da25bc"). InnerVolumeSpecName "kube-api-access-97226". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.163660 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23230cc9-ccdd-4985-a6e0-6b9542da25bc-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "23230cc9-ccdd-4985-a6e0-6b9542da25bc" (UID: "23230cc9-ccdd-4985-a6e0-6b9542da25bc"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.180715 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22680aad-860c-4aba-8796-ff097168686e-config-data" (OuterVolumeSpecName: "config-data") pod "22680aad-860c-4aba-8796-ff097168686e" (UID: "22680aad-860c-4aba-8796-ff097168686e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.222672 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23230cc9-ccdd-4985-a6e0-6b9542da25bc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "23230cc9-ccdd-4985-a6e0-6b9542da25bc" (UID: "23230cc9-ccdd-4985-a6e0-6b9542da25bc"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.253956 4576 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/23230cc9-ccdd-4985-a6e0-6b9542da25bc-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.253978 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-97226\" (UniqueName: \"kubernetes.io/projected/23230cc9-ccdd-4985-a6e0-6b9542da25bc-kube-api-access-97226\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.253990 4576 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23230cc9-ccdd-4985-a6e0-6b9542da25bc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.253999 4576 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23230cc9-ccdd-4985-a6e0-6b9542da25bc-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.254006 4576 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/23230cc9-ccdd-4985-a6e0-6b9542da25bc-logs\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.254014 4576 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22680aad-860c-4aba-8796-ff097168686e-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.254022 4576 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/23230cc9-ccdd-4985-a6e0-6b9542da25bc-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.282695 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23230cc9-ccdd-4985-a6e0-6b9542da25bc-config-data" (OuterVolumeSpecName: "config-data") pod "23230cc9-ccdd-4985-a6e0-6b9542da25bc" (UID: "23230cc9-ccdd-4985-a6e0-6b9542da25bc"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.355572 4576 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23230cc9-ccdd-4985-a6e0-6b9542da25bc-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.412408 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-b677c5dc5-pfc4n"] Dec 03 09:13:47 crc kubenswrapper[4576]: W1203 09:13:47.420684 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6bf8d1cf_0003_4e48_89f5_7ae1698f27ff.slice/crio-6582713e17b5576e1757a84902a37c1227a85f563698c8214463135cb3f0569a WatchSource:0}: Error finding container 6582713e17b5576e1757a84902a37c1227a85f563698c8214463135cb3f0569a: Status 404 returned error can't find the container with id 6582713e17b5576e1757a84902a37c1227a85f563698c8214463135cb3f0569a Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.843043 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-b677c5dc5-pfc4n" event={"ID":"6bf8d1cf-0003-4e48-89f5-7ae1698f27ff","Type":"ContainerStarted","Data":"6582713e17b5576e1757a84902a37c1227a85f563698c8214463135cb3f0569a"} Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.845217 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.845340 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"23230cc9-ccdd-4985-a6e0-6b9542da25bc","Type":"ContainerDied","Data":"2f60b6010678375d873be5a1c40d3cbbbe4d14608aa8cedf0162791cd72f8686"} Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.845431 4576 scope.go:117] "RemoveContainer" containerID="9bed9b661351fcb0057b958916fa4d6aaadf7fc0e31079310fd45d3d1c0b6ffa" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.845545 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.869211 4576 scope.go:117] "RemoveContainer" containerID="96bd187c3393bb473bb0fa298313527f161e11b189b563b98adce3367c233ebf" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.883413 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.897618 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.918893 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.947459 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.953944 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 03 09:13:47 crc kubenswrapper[4576]: E1203 09:13:47.954296 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22680aad-860c-4aba-8796-ff097168686e" containerName="ceilometer-notification-agent" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.954315 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="22680aad-860c-4aba-8796-ff097168686e" containerName="ceilometer-notification-agent" Dec 03 09:13:47 crc kubenswrapper[4576]: E1203 09:13:47.954342 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23230cc9-ccdd-4985-a6e0-6b9542da25bc" containerName="cinder-api" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.954351 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="23230cc9-ccdd-4985-a6e0-6b9542da25bc" containerName="cinder-api" Dec 03 09:13:47 crc kubenswrapper[4576]: E1203 09:13:47.954366 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22680aad-860c-4aba-8796-ff097168686e" containerName="proxy-httpd" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.954373 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="22680aad-860c-4aba-8796-ff097168686e" containerName="proxy-httpd" Dec 03 09:13:47 crc kubenswrapper[4576]: E1203 09:13:47.954387 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22680aad-860c-4aba-8796-ff097168686e" containerName="sg-core" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.954392 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="22680aad-860c-4aba-8796-ff097168686e" containerName="sg-core" Dec 03 09:13:47 crc kubenswrapper[4576]: E1203 09:13:47.954401 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23230cc9-ccdd-4985-a6e0-6b9542da25bc" containerName="cinder-api-log" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.954407 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="23230cc9-ccdd-4985-a6e0-6b9542da25bc" containerName="cinder-api-log" Dec 03 09:13:47 crc kubenswrapper[4576]: E1203 09:13:47.954422 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22680aad-860c-4aba-8796-ff097168686e" containerName="ceilometer-central-agent" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.954428 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="22680aad-860c-4aba-8796-ff097168686e" containerName="ceilometer-central-agent" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.954594 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="22680aad-860c-4aba-8796-ff097168686e" 
containerName="proxy-httpd" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.954607 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="22680aad-860c-4aba-8796-ff097168686e" containerName="ceilometer-central-agent" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.954619 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="22680aad-860c-4aba-8796-ff097168686e" containerName="sg-core" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.954628 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="23230cc9-ccdd-4985-a6e0-6b9542da25bc" containerName="cinder-api" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.954640 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="23230cc9-ccdd-4985-a6e0-6b9542da25bc" containerName="cinder-api-log" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.954655 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="22680aad-860c-4aba-8796-ff097168686e" containerName="ceilometer-notification-agent" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.955556 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.967570 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.967771 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.967822 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 03 09:13:47 crc kubenswrapper[4576]: I1203 09:13:47.985677 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.011606 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.014052 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.020122 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.020276 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.047027 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.076491 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ee570a8-cd83-4db6-bffa-080a2dae8552-config-data\") pod \"cinder-api-0\" (UID: \"9ee570a8-cd83-4db6-bffa-080a2dae8552\") " pod="openstack/cinder-api-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.076569 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ee570a8-cd83-4db6-bffa-080a2dae8552-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"9ee570a8-cd83-4db6-bffa-080a2dae8552\") " pod="openstack/cinder-api-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.076593 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16d4fe18-1b2e-4fcd-bbff-228c62eda882-config-data\") pod \"ceilometer-0\" (UID: \"16d4fe18-1b2e-4fcd-bbff-228c62eda882\") " pod="openstack/ceilometer-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.076638 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16d4fe18-1b2e-4fcd-bbff-228c62eda882-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"16d4fe18-1b2e-4fcd-bbff-228c62eda882\") " pod="openstack/ceilometer-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.076702 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9ee570a8-cd83-4db6-bffa-080a2dae8552-logs\") pod \"cinder-api-0\" (UID: \"9ee570a8-cd83-4db6-bffa-080a2dae8552\") " pod="openstack/cinder-api-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.076717 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nb47h\" (UniqueName: \"kubernetes.io/projected/9ee570a8-cd83-4db6-bffa-080a2dae8552-kube-api-access-nb47h\") pod \"cinder-api-0\" (UID: \"9ee570a8-cd83-4db6-bffa-080a2dae8552\") " pod="openstack/cinder-api-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.076740 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16d4fe18-1b2e-4fcd-bbff-228c62eda882-scripts\") pod \"ceilometer-0\" (UID: \"16d4fe18-1b2e-4fcd-bbff-228c62eda882\") " pod="openstack/ceilometer-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.076758 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9ee570a8-cd83-4db6-bffa-080a2dae8552-scripts\") pod \"cinder-api-0\" (UID: \"9ee570a8-cd83-4db6-bffa-080a2dae8552\") " pod="openstack/cinder-api-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.076775 
4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9ee570a8-cd83-4db6-bffa-080a2dae8552-etc-machine-id\") pod \"cinder-api-0\" (UID: \"9ee570a8-cd83-4db6-bffa-080a2dae8552\") " pod="openstack/cinder-api-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.076791 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9ee570a8-cd83-4db6-bffa-080a2dae8552-public-tls-certs\") pod \"cinder-api-0\" (UID: \"9ee570a8-cd83-4db6-bffa-080a2dae8552\") " pod="openstack/cinder-api-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.076812 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/16d4fe18-1b2e-4fcd-bbff-228c62eda882-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"16d4fe18-1b2e-4fcd-bbff-228c62eda882\") " pod="openstack/ceilometer-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.076840 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9ee570a8-cd83-4db6-bffa-080a2dae8552-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"9ee570a8-cd83-4db6-bffa-080a2dae8552\") " pod="openstack/cinder-api-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.076856 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/16d4fe18-1b2e-4fcd-bbff-228c62eda882-log-httpd\") pod \"ceilometer-0\" (UID: \"16d4fe18-1b2e-4fcd-bbff-228c62eda882\") " pod="openstack/ceilometer-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.076882 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/16d4fe18-1b2e-4fcd-bbff-228c62eda882-run-httpd\") pod \"ceilometer-0\" (UID: \"16d4fe18-1b2e-4fcd-bbff-228c62eda882\") " pod="openstack/ceilometer-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.076900 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sgwgf\" (UniqueName: \"kubernetes.io/projected/16d4fe18-1b2e-4fcd-bbff-228c62eda882-kube-api-access-sgwgf\") pod \"ceilometer-0\" (UID: \"16d4fe18-1b2e-4fcd-bbff-228c62eda882\") " pod="openstack/ceilometer-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.076920 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9ee570a8-cd83-4db6-bffa-080a2dae8552-config-data-custom\") pod \"cinder-api-0\" (UID: \"9ee570a8-cd83-4db6-bffa-080a2dae8552\") " pod="openstack/cinder-api-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.179636 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9ee570a8-cd83-4db6-bffa-080a2dae8552-scripts\") pod \"cinder-api-0\" (UID: \"9ee570a8-cd83-4db6-bffa-080a2dae8552\") " pod="openstack/cinder-api-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.179685 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9ee570a8-cd83-4db6-bffa-080a2dae8552-etc-machine-id\") pod 
\"cinder-api-0\" (UID: \"9ee570a8-cd83-4db6-bffa-080a2dae8552\") " pod="openstack/cinder-api-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.179709 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9ee570a8-cd83-4db6-bffa-080a2dae8552-public-tls-certs\") pod \"cinder-api-0\" (UID: \"9ee570a8-cd83-4db6-bffa-080a2dae8552\") " pod="openstack/cinder-api-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.179732 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/16d4fe18-1b2e-4fcd-bbff-228c62eda882-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"16d4fe18-1b2e-4fcd-bbff-228c62eda882\") " pod="openstack/ceilometer-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.179763 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9ee570a8-cd83-4db6-bffa-080a2dae8552-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"9ee570a8-cd83-4db6-bffa-080a2dae8552\") " pod="openstack/cinder-api-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.179782 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/16d4fe18-1b2e-4fcd-bbff-228c62eda882-log-httpd\") pod \"ceilometer-0\" (UID: \"16d4fe18-1b2e-4fcd-bbff-228c62eda882\") " pod="openstack/ceilometer-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.179810 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/16d4fe18-1b2e-4fcd-bbff-228c62eda882-run-httpd\") pod \"ceilometer-0\" (UID: \"16d4fe18-1b2e-4fcd-bbff-228c62eda882\") " pod="openstack/ceilometer-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.179828 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sgwgf\" (UniqueName: \"kubernetes.io/projected/16d4fe18-1b2e-4fcd-bbff-228c62eda882-kube-api-access-sgwgf\") pod \"ceilometer-0\" (UID: \"16d4fe18-1b2e-4fcd-bbff-228c62eda882\") " pod="openstack/ceilometer-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.179852 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9ee570a8-cd83-4db6-bffa-080a2dae8552-config-data-custom\") pod \"cinder-api-0\" (UID: \"9ee570a8-cd83-4db6-bffa-080a2dae8552\") " pod="openstack/cinder-api-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.179874 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ee570a8-cd83-4db6-bffa-080a2dae8552-config-data\") pod \"cinder-api-0\" (UID: \"9ee570a8-cd83-4db6-bffa-080a2dae8552\") " pod="openstack/cinder-api-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.179903 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ee570a8-cd83-4db6-bffa-080a2dae8552-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"9ee570a8-cd83-4db6-bffa-080a2dae8552\") " pod="openstack/cinder-api-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.179922 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/16d4fe18-1b2e-4fcd-bbff-228c62eda882-config-data\") pod \"ceilometer-0\" (UID: \"16d4fe18-1b2e-4fcd-bbff-228c62eda882\") " pod="openstack/ceilometer-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.179963 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16d4fe18-1b2e-4fcd-bbff-228c62eda882-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"16d4fe18-1b2e-4fcd-bbff-228c62eda882\") " pod="openstack/ceilometer-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.180028 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9ee570a8-cd83-4db6-bffa-080a2dae8552-logs\") pod \"cinder-api-0\" (UID: \"9ee570a8-cd83-4db6-bffa-080a2dae8552\") " pod="openstack/cinder-api-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.180045 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nb47h\" (UniqueName: \"kubernetes.io/projected/9ee570a8-cd83-4db6-bffa-080a2dae8552-kube-api-access-nb47h\") pod \"cinder-api-0\" (UID: \"9ee570a8-cd83-4db6-bffa-080a2dae8552\") " pod="openstack/cinder-api-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.180068 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16d4fe18-1b2e-4fcd-bbff-228c62eda882-scripts\") pod \"ceilometer-0\" (UID: \"16d4fe18-1b2e-4fcd-bbff-228c62eda882\") " pod="openstack/ceilometer-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.183169 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9ee570a8-cd83-4db6-bffa-080a2dae8552-etc-machine-id\") pod \"cinder-api-0\" (UID: \"9ee570a8-cd83-4db6-bffa-080a2dae8552\") " pod="openstack/cinder-api-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.186785 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ee570a8-cd83-4db6-bffa-080a2dae8552-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"9ee570a8-cd83-4db6-bffa-080a2dae8552\") " pod="openstack/cinder-api-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.187130 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/16d4fe18-1b2e-4fcd-bbff-228c62eda882-log-httpd\") pod \"ceilometer-0\" (UID: \"16d4fe18-1b2e-4fcd-bbff-228c62eda882\") " pod="openstack/ceilometer-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.187713 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/16d4fe18-1b2e-4fcd-bbff-228c62eda882-run-httpd\") pod \"ceilometer-0\" (UID: \"16d4fe18-1b2e-4fcd-bbff-228c62eda882\") " pod="openstack/ceilometer-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.187974 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9ee570a8-cd83-4db6-bffa-080a2dae8552-logs\") pod \"cinder-api-0\" (UID: \"9ee570a8-cd83-4db6-bffa-080a2dae8552\") " pod="openstack/cinder-api-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.188876 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16d4fe18-1b2e-4fcd-bbff-228c62eda882-config-data\") pod 
\"ceilometer-0\" (UID: \"16d4fe18-1b2e-4fcd-bbff-228c62eda882\") " pod="openstack/ceilometer-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.190838 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9ee570a8-cd83-4db6-bffa-080a2dae8552-scripts\") pod \"cinder-api-0\" (UID: \"9ee570a8-cd83-4db6-bffa-080a2dae8552\") " pod="openstack/cinder-api-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.196181 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ee570a8-cd83-4db6-bffa-080a2dae8552-config-data\") pod \"cinder-api-0\" (UID: \"9ee570a8-cd83-4db6-bffa-080a2dae8552\") " pod="openstack/cinder-api-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.198101 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16d4fe18-1b2e-4fcd-bbff-228c62eda882-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"16d4fe18-1b2e-4fcd-bbff-228c62eda882\") " pod="openstack/ceilometer-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.203971 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/16d4fe18-1b2e-4fcd-bbff-228c62eda882-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"16d4fe18-1b2e-4fcd-bbff-228c62eda882\") " pod="openstack/ceilometer-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.219234 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9ee570a8-cd83-4db6-bffa-080a2dae8552-config-data-custom\") pod \"cinder-api-0\" (UID: \"9ee570a8-cd83-4db6-bffa-080a2dae8552\") " pod="openstack/cinder-api-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.219382 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9ee570a8-cd83-4db6-bffa-080a2dae8552-public-tls-certs\") pod \"cinder-api-0\" (UID: \"9ee570a8-cd83-4db6-bffa-080a2dae8552\") " pod="openstack/cinder-api-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.220008 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16d4fe18-1b2e-4fcd-bbff-228c62eda882-scripts\") pod \"ceilometer-0\" (UID: \"16d4fe18-1b2e-4fcd-bbff-228c62eda882\") " pod="openstack/ceilometer-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.220148 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9ee570a8-cd83-4db6-bffa-080a2dae8552-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"9ee570a8-cd83-4db6-bffa-080a2dae8552\") " pod="openstack/cinder-api-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.240348 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sgwgf\" (UniqueName: \"kubernetes.io/projected/16d4fe18-1b2e-4fcd-bbff-228c62eda882-kube-api-access-sgwgf\") pod \"ceilometer-0\" (UID: \"16d4fe18-1b2e-4fcd-bbff-228c62eda882\") " pod="openstack/ceilometer-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.259202 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nb47h\" (UniqueName: \"kubernetes.io/projected/9ee570a8-cd83-4db6-bffa-080a2dae8552-kube-api-access-nb47h\") pod \"cinder-api-0\" (UID: 
\"9ee570a8-cd83-4db6-bffa-080a2dae8552\") " pod="openstack/cinder-api-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.334140 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.375736 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.875367 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-b677c5dc5-pfc4n" event={"ID":"6bf8d1cf-0003-4e48-89f5-7ae1698f27ff","Type":"ContainerStarted","Data":"0fb7e1cb0f36ec757416c9e2e8a3f4b36c18656ed18f0945ade81931e1d08259"} Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.875780 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-b677c5dc5-pfc4n" event={"ID":"6bf8d1cf-0003-4e48-89f5-7ae1698f27ff","Type":"ContainerStarted","Data":"be69e4e8702283a8f06b77e755df584bd1ec420d99f797fd6e9e687947cd5244"} Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.875853 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-b677c5dc5-pfc4n" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.875884 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-b677c5dc5-pfc4n" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.913559 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-b677c5dc5-pfc4n" podStartSLOduration=11.913540541 podStartE2EDuration="11.913540541s" podCreationTimestamp="2025-12-03 09:13:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:13:48.909338126 +0000 UTC m=+2036.295315130" watchObservedRunningTime="2025-12-03 09:13:48.913540541 +0000 UTC m=+2036.299517515" Dec 03 09:13:48 crc kubenswrapper[4576]: I1203 09:13:48.949978 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:13:49 crc kubenswrapper[4576]: I1203 09:13:49.068675 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 03 09:13:49 crc kubenswrapper[4576]: W1203 09:13:49.071171 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9ee570a8_cd83_4db6_bffa_080a2dae8552.slice/crio-f26a1fbea91bc342f39ab8a083a9e9b238183f3bb2378a3dbcfd57c082f0fecb WatchSource:0}: Error finding container f26a1fbea91bc342f39ab8a083a9e9b238183f3bb2378a3dbcfd57c082f0fecb: Status 404 returned error can't find the container with id f26a1fbea91bc342f39ab8a083a9e9b238183f3bb2378a3dbcfd57c082f0fecb Dec 03 09:13:49 crc kubenswrapper[4576]: I1203 09:13:49.688067 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22680aad-860c-4aba-8796-ff097168686e" path="/var/lib/kubelet/pods/22680aad-860c-4aba-8796-ff097168686e/volumes" Dec 03 09:13:49 crc kubenswrapper[4576]: I1203 09:13:49.689295 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23230cc9-ccdd-4985-a6e0-6b9542da25bc" path="/var/lib/kubelet/pods/23230cc9-ccdd-4985-a6e0-6b9542da25bc/volumes" Dec 03 09:13:49 crc kubenswrapper[4576]: I1203 09:13:49.897605 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" 
event={"ID":"9ee570a8-cd83-4db6-bffa-080a2dae8552","Type":"ContainerStarted","Data":"b77d6c77350ef690f272f977f7f3193881b656c3346eb8c0cd0e7aa3a9c9f88a"} Dec 03 09:13:49 crc kubenswrapper[4576]: I1203 09:13:49.897888 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"9ee570a8-cd83-4db6-bffa-080a2dae8552","Type":"ContainerStarted","Data":"f26a1fbea91bc342f39ab8a083a9e9b238183f3bb2378a3dbcfd57c082f0fecb"} Dec 03 09:13:49 crc kubenswrapper[4576]: I1203 09:13:49.914679 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"16d4fe18-1b2e-4fcd-bbff-228c62eda882","Type":"ContainerStarted","Data":"5c1e28d85bd31260d8605efaa14cde04ce7f6ed97fa8c1d2e6ceefb6053e6fa4"} Dec 03 09:13:50 crc kubenswrapper[4576]: I1203 09:13:50.923939 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"9ee570a8-cd83-4db6-bffa-080a2dae8552","Type":"ContainerStarted","Data":"13f753b4c92ef2508d98cf790d3275e1888d7716b1b5f6b96f3cc7fe2d7c77e8"} Dec 03 09:13:50 crc kubenswrapper[4576]: I1203 09:13:50.924253 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 03 09:13:50 crc kubenswrapper[4576]: I1203 09:13:50.950180 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.950158514 podStartE2EDuration="3.950158514s" podCreationTimestamp="2025-12-03 09:13:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:13:50.944947472 +0000 UTC m=+2038.330924466" watchObservedRunningTime="2025-12-03 09:13:50.950158514 +0000 UTC m=+2038.336135498" Dec 03 09:13:51 crc kubenswrapper[4576]: I1203 09:13:51.746650 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-5d9b9454d4-cbqlk" podUID="83ab6db2-7b9e-4161-a064-56fe67986825" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.144:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.144:8443: connect: connection refused" Dec 03 09:13:51 crc kubenswrapper[4576]: I1203 09:13:51.909471 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-6d649695d8-6rtxn" podUID="288ed488-5270-4966-b866-f9f015262989" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Dec 03 09:13:51 crc kubenswrapper[4576]: I1203 09:13:51.934249 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"16d4fe18-1b2e-4fcd-bbff-228c62eda882","Type":"ContainerStarted","Data":"0591aed9d23ecf817ade50c383df2325ce5a50c9f766a4d85666cf60f9fdb024"} Dec 03 09:13:52 crc kubenswrapper[4576]: I1203 09:13:52.287253 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-667d896bdd-mtmgs" Dec 03 09:13:52 crc kubenswrapper[4576]: I1203 09:13:52.613303 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-b677c5dc5-pfc4n" Dec 03 09:13:52 crc kubenswrapper[4576]: I1203 09:13:52.945396 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"16d4fe18-1b2e-4fcd-bbff-228c62eda882","Type":"ContainerStarted","Data":"1282a19a6e6dc7e685230c54bfa2b9ec10f10492c1e76afe029cb6503e3c7008"} Dec 03 09:13:52 crc kubenswrapper[4576]: I1203 
09:13:52.945439 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"16d4fe18-1b2e-4fcd-bbff-228c62eda882","Type":"ContainerStarted","Data":"e78bc7ac09b109886b894a3f14f211502703e0b618cfa910defcb0c19b00bd2c"} Dec 03 09:13:53 crc kubenswrapper[4576]: I1203 09:13:53.333838 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-s4j5r"] Dec 03 09:13:53 crc kubenswrapper[4576]: I1203 09:13:53.335064 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-s4j5r" Dec 03 09:13:53 crc kubenswrapper[4576]: I1203 09:13:53.352028 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-s4j5r"] Dec 03 09:13:53 crc kubenswrapper[4576]: I1203 09:13:53.403719 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af864d88-4394-47c6-883a-85d47c2a6832-operator-scripts\") pod \"nova-api-db-create-s4j5r\" (UID: \"af864d88-4394-47c6-883a-85d47c2a6832\") " pod="openstack/nova-api-db-create-s4j5r" Dec 03 09:13:53 crc kubenswrapper[4576]: I1203 09:13:53.403769 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56rzd\" (UniqueName: \"kubernetes.io/projected/af864d88-4394-47c6-883a-85d47c2a6832-kube-api-access-56rzd\") pod \"nova-api-db-create-s4j5r\" (UID: \"af864d88-4394-47c6-883a-85d47c2a6832\") " pod="openstack/nova-api-db-create-s4j5r" Dec 03 09:13:53 crc kubenswrapper[4576]: I1203 09:13:53.440659 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-8hs7j"] Dec 03 09:13:53 crc kubenswrapper[4576]: I1203 09:13:53.441828 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-8hs7j" Dec 03 09:13:53 crc kubenswrapper[4576]: I1203 09:13:53.450714 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-8hs7j"] Dec 03 09:13:53 crc kubenswrapper[4576]: I1203 09:13:53.505578 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af864d88-4394-47c6-883a-85d47c2a6832-operator-scripts\") pod \"nova-api-db-create-s4j5r\" (UID: \"af864d88-4394-47c6-883a-85d47c2a6832\") " pod="openstack/nova-api-db-create-s4j5r" Dec 03 09:13:53 crc kubenswrapper[4576]: I1203 09:13:53.505629 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56rzd\" (UniqueName: \"kubernetes.io/projected/af864d88-4394-47c6-883a-85d47c2a6832-kube-api-access-56rzd\") pod \"nova-api-db-create-s4j5r\" (UID: \"af864d88-4394-47c6-883a-85d47c2a6832\") " pod="openstack/nova-api-db-create-s4j5r" Dec 03 09:13:53 crc kubenswrapper[4576]: I1203 09:13:53.505672 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/50764171-616e-4be8-b380-a5923d51773a-operator-scripts\") pod \"nova-cell0-db-create-8hs7j\" (UID: \"50764171-616e-4be8-b380-a5923d51773a\") " pod="openstack/nova-cell0-db-create-8hs7j" Dec 03 09:13:53 crc kubenswrapper[4576]: I1203 09:13:53.505806 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qw44k\" (UniqueName: \"kubernetes.io/projected/50764171-616e-4be8-b380-a5923d51773a-kube-api-access-qw44k\") pod \"nova-cell0-db-create-8hs7j\" (UID: \"50764171-616e-4be8-b380-a5923d51773a\") " pod="openstack/nova-cell0-db-create-8hs7j" Dec 03 09:13:53 crc kubenswrapper[4576]: I1203 09:13:53.506551 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af864d88-4394-47c6-883a-85d47c2a6832-operator-scripts\") pod \"nova-api-db-create-s4j5r\" (UID: \"af864d88-4394-47c6-883a-85d47c2a6832\") " pod="openstack/nova-api-db-create-s4j5r" Dec 03 09:13:53 crc kubenswrapper[4576]: I1203 09:13:53.541155 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-56rzd\" (UniqueName: \"kubernetes.io/projected/af864d88-4394-47c6-883a-85d47c2a6832-kube-api-access-56rzd\") pod \"nova-api-db-create-s4j5r\" (UID: \"af864d88-4394-47c6-883a-85d47c2a6832\") " pod="openstack/nova-api-db-create-s4j5r" Dec 03 09:13:53 crc kubenswrapper[4576]: I1203 09:13:53.545986 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-p9nqp"] Dec 03 09:13:53 crc kubenswrapper[4576]: I1203 09:13:53.547254 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-p9nqp" Dec 03 09:13:53 crc kubenswrapper[4576]: I1203 09:13:53.560167 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-5ba3-account-create-update-hkhpv"] Dec 03 09:13:53 crc kubenswrapper[4576]: I1203 09:13:53.561325 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-5ba3-account-create-update-hkhpv" Dec 03 09:13:53 crc kubenswrapper[4576]: I1203 09:13:53.568270 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Dec 03 09:13:53 crc kubenswrapper[4576]: I1203 09:13:53.573803 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-p9nqp"] Dec 03 09:13:53 crc kubenswrapper[4576]: I1203 09:13:53.594409 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-5ba3-account-create-update-hkhpv"] Dec 03 09:13:53 crc kubenswrapper[4576]: I1203 09:13:53.607245 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqvxf\" (UniqueName: \"kubernetes.io/projected/9878d95a-72e1-456b-aee3-f0bf43432249-kube-api-access-dqvxf\") pod \"nova-cell1-db-create-p9nqp\" (UID: \"9878d95a-72e1-456b-aee3-f0bf43432249\") " pod="openstack/nova-cell1-db-create-p9nqp" Dec 03 09:13:53 crc kubenswrapper[4576]: I1203 09:13:53.607317 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9878d95a-72e1-456b-aee3-f0bf43432249-operator-scripts\") pod \"nova-cell1-db-create-p9nqp\" (UID: \"9878d95a-72e1-456b-aee3-f0bf43432249\") " pod="openstack/nova-cell1-db-create-p9nqp" Dec 03 09:13:53 crc kubenswrapper[4576]: I1203 09:13:53.607369 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/50764171-616e-4be8-b380-a5923d51773a-operator-scripts\") pod \"nova-cell0-db-create-8hs7j\" (UID: \"50764171-616e-4be8-b380-a5923d51773a\") " pod="openstack/nova-cell0-db-create-8hs7j" Dec 03 09:13:53 crc kubenswrapper[4576]: I1203 09:13:53.607502 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qw44k\" (UniqueName: \"kubernetes.io/projected/50764171-616e-4be8-b380-a5923d51773a-kube-api-access-qw44k\") pod \"nova-cell0-db-create-8hs7j\" (UID: \"50764171-616e-4be8-b380-a5923d51773a\") " pod="openstack/nova-cell0-db-create-8hs7j" Dec 03 09:13:53 crc kubenswrapper[4576]: I1203 09:13:53.608622 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/50764171-616e-4be8-b380-a5923d51773a-operator-scripts\") pod \"nova-cell0-db-create-8hs7j\" (UID: \"50764171-616e-4be8-b380-a5923d51773a\") " pod="openstack/nova-cell0-db-create-8hs7j" Dec 03 09:13:53 crc kubenswrapper[4576]: I1203 09:13:53.640204 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qw44k\" (UniqueName: \"kubernetes.io/projected/50764171-616e-4be8-b380-a5923d51773a-kube-api-access-qw44k\") pod \"nova-cell0-db-create-8hs7j\" (UID: \"50764171-616e-4be8-b380-a5923d51773a\") " pod="openstack/nova-cell0-db-create-8hs7j" Dec 03 09:13:53 crc kubenswrapper[4576]: I1203 09:13:53.656477 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-s4j5r" Dec 03 09:13:53 crc kubenswrapper[4576]: I1203 09:13:53.709717 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dqvxf\" (UniqueName: \"kubernetes.io/projected/9878d95a-72e1-456b-aee3-f0bf43432249-kube-api-access-dqvxf\") pod \"nova-cell1-db-create-p9nqp\" (UID: \"9878d95a-72e1-456b-aee3-f0bf43432249\") " pod="openstack/nova-cell1-db-create-p9nqp" Dec 03 09:13:53 crc kubenswrapper[4576]: I1203 09:13:53.709790 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9878d95a-72e1-456b-aee3-f0bf43432249-operator-scripts\") pod \"nova-cell1-db-create-p9nqp\" (UID: \"9878d95a-72e1-456b-aee3-f0bf43432249\") " pod="openstack/nova-cell1-db-create-p9nqp" Dec 03 09:13:53 crc kubenswrapper[4576]: I1203 09:13:53.709829 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f8ce720b-ea32-4d75-b6f7-7adf32a6ed6a-operator-scripts\") pod \"nova-api-5ba3-account-create-update-hkhpv\" (UID: \"f8ce720b-ea32-4d75-b6f7-7adf32a6ed6a\") " pod="openstack/nova-api-5ba3-account-create-update-hkhpv" Dec 03 09:13:53 crc kubenswrapper[4576]: I1203 09:13:53.709875 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2dds4\" (UniqueName: \"kubernetes.io/projected/f8ce720b-ea32-4d75-b6f7-7adf32a6ed6a-kube-api-access-2dds4\") pod \"nova-api-5ba3-account-create-update-hkhpv\" (UID: \"f8ce720b-ea32-4d75-b6f7-7adf32a6ed6a\") " pod="openstack/nova-api-5ba3-account-create-update-hkhpv" Dec 03 09:13:53 crc kubenswrapper[4576]: I1203 09:13:53.710742 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9878d95a-72e1-456b-aee3-f0bf43432249-operator-scripts\") pod \"nova-cell1-db-create-p9nqp\" (UID: \"9878d95a-72e1-456b-aee3-f0bf43432249\") " pod="openstack/nova-cell1-db-create-p9nqp" Dec 03 09:13:53 crc kubenswrapper[4576]: I1203 09:13:53.755313 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dqvxf\" (UniqueName: \"kubernetes.io/projected/9878d95a-72e1-456b-aee3-f0bf43432249-kube-api-access-dqvxf\") pod \"nova-cell1-db-create-p9nqp\" (UID: \"9878d95a-72e1-456b-aee3-f0bf43432249\") " pod="openstack/nova-cell1-db-create-p9nqp" Dec 03 09:13:53 crc kubenswrapper[4576]: I1203 09:13:53.760228 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-8hs7j" Dec 03 09:13:53 crc kubenswrapper[4576]: I1203 09:13:53.813619 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f8ce720b-ea32-4d75-b6f7-7adf32a6ed6a-operator-scripts\") pod \"nova-api-5ba3-account-create-update-hkhpv\" (UID: \"f8ce720b-ea32-4d75-b6f7-7adf32a6ed6a\") " pod="openstack/nova-api-5ba3-account-create-update-hkhpv" Dec 03 09:13:53 crc kubenswrapper[4576]: I1203 09:13:53.813696 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2dds4\" (UniqueName: \"kubernetes.io/projected/f8ce720b-ea32-4d75-b6f7-7adf32a6ed6a-kube-api-access-2dds4\") pod \"nova-api-5ba3-account-create-update-hkhpv\" (UID: \"f8ce720b-ea32-4d75-b6f7-7adf32a6ed6a\") " pod="openstack/nova-api-5ba3-account-create-update-hkhpv" Dec 03 09:13:53 crc kubenswrapper[4576]: I1203 09:13:53.815726 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f8ce720b-ea32-4d75-b6f7-7adf32a6ed6a-operator-scripts\") pod \"nova-api-5ba3-account-create-update-hkhpv\" (UID: \"f8ce720b-ea32-4d75-b6f7-7adf32a6ed6a\") " pod="openstack/nova-api-5ba3-account-create-update-hkhpv" Dec 03 09:13:53 crc kubenswrapper[4576]: I1203 09:13:53.865144 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2dds4\" (UniqueName: \"kubernetes.io/projected/f8ce720b-ea32-4d75-b6f7-7adf32a6ed6a-kube-api-access-2dds4\") pod \"nova-api-5ba3-account-create-update-hkhpv\" (UID: \"f8ce720b-ea32-4d75-b6f7-7adf32a6ed6a\") " pod="openstack/nova-api-5ba3-account-create-update-hkhpv" Dec 03 09:13:53 crc kubenswrapper[4576]: I1203 09:13:53.890979 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-ddfc-account-create-update-55b6r"] Dec 03 09:13:53 crc kubenswrapper[4576]: I1203 09:13:53.927769 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-ddfc-account-create-update-55b6r" Dec 03 09:13:53 crc kubenswrapper[4576]: I1203 09:13:53.939365 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Dec 03 09:13:53 crc kubenswrapper[4576]: I1203 09:13:53.940174 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-p9nqp" Dec 03 09:13:54 crc kubenswrapper[4576]: I1203 09:13:53.996520 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-5ba3-account-create-update-hkhpv" Dec 03 09:13:54 crc kubenswrapper[4576]: I1203 09:13:54.053657 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-ddfc-account-create-update-55b6r"] Dec 03 09:13:54 crc kubenswrapper[4576]: I1203 09:13:54.111232 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-1cc4-account-create-update-ltg7z"] Dec 03 09:13:54 crc kubenswrapper[4576]: I1203 09:13:54.115996 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66f7791e-f5ad-4d75-b250-d584031bc805-operator-scripts\") pod \"nova-cell0-ddfc-account-create-update-55b6r\" (UID: \"66f7791e-f5ad-4d75-b250-d584031bc805\") " pod="openstack/nova-cell0-ddfc-account-create-update-55b6r" Dec 03 09:13:54 crc kubenswrapper[4576]: I1203 09:13:54.116157 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zjzh5\" (UniqueName: \"kubernetes.io/projected/66f7791e-f5ad-4d75-b250-d584031bc805-kube-api-access-zjzh5\") pod \"nova-cell0-ddfc-account-create-update-55b6r\" (UID: \"66f7791e-f5ad-4d75-b250-d584031bc805\") " pod="openstack/nova-cell0-ddfc-account-create-update-55b6r" Dec 03 09:13:54 crc kubenswrapper[4576]: I1203 09:13:54.120491 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-1cc4-account-create-update-ltg7z" Dec 03 09:13:54 crc kubenswrapper[4576]: I1203 09:13:54.124807 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Dec 03 09:13:54 crc kubenswrapper[4576]: I1203 09:13:54.130519 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-1cc4-account-create-update-ltg7z"] Dec 03 09:13:54 crc kubenswrapper[4576]: I1203 09:13:54.230165 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c224894c-4c8f-488a-a01e-69e2f721f329-operator-scripts\") pod \"nova-cell1-1cc4-account-create-update-ltg7z\" (UID: \"c224894c-4c8f-488a-a01e-69e2f721f329\") " pod="openstack/nova-cell1-1cc4-account-create-update-ltg7z" Dec 03 09:13:54 crc kubenswrapper[4576]: I1203 09:13:54.230516 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66f7791e-f5ad-4d75-b250-d584031bc805-operator-scripts\") pod \"nova-cell0-ddfc-account-create-update-55b6r\" (UID: \"66f7791e-f5ad-4d75-b250-d584031bc805\") " pod="openstack/nova-cell0-ddfc-account-create-update-55b6r" Dec 03 09:13:54 crc kubenswrapper[4576]: I1203 09:13:54.230598 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zjzh5\" (UniqueName: \"kubernetes.io/projected/66f7791e-f5ad-4d75-b250-d584031bc805-kube-api-access-zjzh5\") pod \"nova-cell0-ddfc-account-create-update-55b6r\" (UID: \"66f7791e-f5ad-4d75-b250-d584031bc805\") " pod="openstack/nova-cell0-ddfc-account-create-update-55b6r" Dec 03 09:13:54 crc kubenswrapper[4576]: I1203 09:13:54.230686 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n4gn2\" (UniqueName: \"kubernetes.io/projected/c224894c-4c8f-488a-a01e-69e2f721f329-kube-api-access-n4gn2\") pod \"nova-cell1-1cc4-account-create-update-ltg7z\" (UID: 
\"c224894c-4c8f-488a-a01e-69e2f721f329\") " pod="openstack/nova-cell1-1cc4-account-create-update-ltg7z" Dec 03 09:13:54 crc kubenswrapper[4576]: I1203 09:13:54.231656 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66f7791e-f5ad-4d75-b250-d584031bc805-operator-scripts\") pod \"nova-cell0-ddfc-account-create-update-55b6r\" (UID: \"66f7791e-f5ad-4d75-b250-d584031bc805\") " pod="openstack/nova-cell0-ddfc-account-create-update-55b6r" Dec 03 09:13:54 crc kubenswrapper[4576]: I1203 09:13:54.294293 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zjzh5\" (UniqueName: \"kubernetes.io/projected/66f7791e-f5ad-4d75-b250-d584031bc805-kube-api-access-zjzh5\") pod \"nova-cell0-ddfc-account-create-update-55b6r\" (UID: \"66f7791e-f5ad-4d75-b250-d584031bc805\") " pod="openstack/nova-cell0-ddfc-account-create-update-55b6r" Dec 03 09:13:54 crc kubenswrapper[4576]: I1203 09:13:54.315641 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-ddfc-account-create-update-55b6r" Dec 03 09:13:54 crc kubenswrapper[4576]: I1203 09:13:54.334099 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c224894c-4c8f-488a-a01e-69e2f721f329-operator-scripts\") pod \"nova-cell1-1cc4-account-create-update-ltg7z\" (UID: \"c224894c-4c8f-488a-a01e-69e2f721f329\") " pod="openstack/nova-cell1-1cc4-account-create-update-ltg7z" Dec 03 09:13:54 crc kubenswrapper[4576]: I1203 09:13:54.334212 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n4gn2\" (UniqueName: \"kubernetes.io/projected/c224894c-4c8f-488a-a01e-69e2f721f329-kube-api-access-n4gn2\") pod \"nova-cell1-1cc4-account-create-update-ltg7z\" (UID: \"c224894c-4c8f-488a-a01e-69e2f721f329\") " pod="openstack/nova-cell1-1cc4-account-create-update-ltg7z" Dec 03 09:13:54 crc kubenswrapper[4576]: I1203 09:13:54.358551 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n4gn2\" (UniqueName: \"kubernetes.io/projected/c224894c-4c8f-488a-a01e-69e2f721f329-kube-api-access-n4gn2\") pod \"nova-cell1-1cc4-account-create-update-ltg7z\" (UID: \"c224894c-4c8f-488a-a01e-69e2f721f329\") " pod="openstack/nova-cell1-1cc4-account-create-update-ltg7z" Dec 03 09:13:54 crc kubenswrapper[4576]: I1203 09:13:54.497591 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-6f978d8b99-7d88p" Dec 03 09:13:54 crc kubenswrapper[4576]: I1203 09:13:54.511032 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c224894c-4c8f-488a-a01e-69e2f721f329-operator-scripts\") pod \"nova-cell1-1cc4-account-create-update-ltg7z\" (UID: \"c224894c-4c8f-488a-a01e-69e2f721f329\") " pod="openstack/nova-cell1-1cc4-account-create-update-ltg7z" Dec 03 09:13:54 crc kubenswrapper[4576]: I1203 09:13:54.584937 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-667d896bdd-mtmgs"] Dec 03 09:13:54 crc kubenswrapper[4576]: I1203 09:13:54.585201 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-667d896bdd-mtmgs" podUID="2e5e255c-5481-4fc1-937f-53014b9b1da3" containerName="neutron-api" containerID="cri-o://301edefa9cd0abfc629e1af236fa272aad4c402e366458b862a267cf61069921" gracePeriod=30 Dec 03 09:13:54 crc 
kubenswrapper[4576]: I1203 09:13:54.585648 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-667d896bdd-mtmgs" podUID="2e5e255c-5481-4fc1-937f-53014b9b1da3" containerName="neutron-httpd" containerID="cri-o://ba163b07550f5a6b7db8c44eaf679328dae2ff9951b52f5efefc6dc4d42cd683" gracePeriod=30 Dec 03 09:13:54 crc kubenswrapper[4576]: I1203 09:13:54.653983 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-1cc4-account-create-update-ltg7z" Dec 03 09:13:54 crc kubenswrapper[4576]: I1203 09:13:54.721049 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-s4j5r"] Dec 03 09:13:54 crc kubenswrapper[4576]: I1203 09:13:54.730504 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-8hs7j"] Dec 03 09:13:55 crc kubenswrapper[4576]: I1203 09:13:55.138502 4576 generic.go:334] "Generic (PLEG): container finished" podID="2e5e255c-5481-4fc1-937f-53014b9b1da3" containerID="ba163b07550f5a6b7db8c44eaf679328dae2ff9951b52f5efefc6dc4d42cd683" exitCode=0 Dec 03 09:13:55 crc kubenswrapper[4576]: I1203 09:13:55.138825 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-667d896bdd-mtmgs" event={"ID":"2e5e255c-5481-4fc1-937f-53014b9b1da3","Type":"ContainerDied","Data":"ba163b07550f5a6b7db8c44eaf679328dae2ff9951b52f5efefc6dc4d42cd683"} Dec 03 09:13:55 crc kubenswrapper[4576]: I1203 09:13:55.142329 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-s4j5r" event={"ID":"af864d88-4394-47c6-883a-85d47c2a6832","Type":"ContainerStarted","Data":"0b9b4e2fb672e088dfc9622960d4e1fdee42ff5509306d687ca5c4c5be2a0c07"} Dec 03 09:13:55 crc kubenswrapper[4576]: I1203 09:13:55.155830 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-p9nqp"] Dec 03 09:13:55 crc kubenswrapper[4576]: I1203 09:13:55.181766 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-8hs7j" event={"ID":"50764171-616e-4be8-b380-a5923d51773a","Type":"ContainerStarted","Data":"ed25d0714a8cdee89f17e28f128807a6316ce6f7eafdc75eedcd788654f91392"} Dec 03 09:13:55 crc kubenswrapper[4576]: I1203 09:13:55.210672 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-5ba3-account-create-update-hkhpv"] Dec 03 09:13:55 crc kubenswrapper[4576]: I1203 09:13:55.455035 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-ddfc-account-create-update-55b6r"] Dec 03 09:13:55 crc kubenswrapper[4576]: I1203 09:13:55.484807 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-1cc4-account-create-update-ltg7z"] Dec 03 09:13:56 crc kubenswrapper[4576]: I1203 09:13:56.216182 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-5ba3-account-create-update-hkhpv" event={"ID":"f8ce720b-ea32-4d75-b6f7-7adf32a6ed6a","Type":"ContainerStarted","Data":"b45b3dd0b3196523db64d7cab40faddcb4ff25c93c2abad8317748f50d6d3f53"} Dec 03 09:13:56 crc kubenswrapper[4576]: I1203 09:13:56.217264 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-5ba3-account-create-update-hkhpv" event={"ID":"f8ce720b-ea32-4d75-b6f7-7adf32a6ed6a","Type":"ContainerStarted","Data":"dd04b41242c375a6db1210795df744a09a5ce8cfdf868ba78a25729789d7fddc"} Dec 03 09:13:56 crc kubenswrapper[4576]: I1203 09:13:56.246267 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/nova-api-5ba3-account-create-update-hkhpv" podStartSLOduration=3.246246806 podStartE2EDuration="3.246246806s" podCreationTimestamp="2025-12-03 09:13:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:13:56.237864757 +0000 UTC m=+2043.623841741" watchObservedRunningTime="2025-12-03 09:13:56.246246806 +0000 UTC m=+2043.632223790" Dec 03 09:13:56 crc kubenswrapper[4576]: I1203 09:13:56.262153 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"16d4fe18-1b2e-4fcd-bbff-228c62eda882","Type":"ContainerStarted","Data":"8db04dc6377036781d391edabbc632ad3f319e06d329b9a28f6dcb236fcd08d5"} Dec 03 09:13:56 crc kubenswrapper[4576]: I1203 09:13:56.263084 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 09:13:56 crc kubenswrapper[4576]: I1203 09:13:56.272567 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-s4j5r" event={"ID":"af864d88-4394-47c6-883a-85d47c2a6832","Type":"ContainerStarted","Data":"70b9250dc34f79d6e4d41960573875bf9e59c0987fa78766142d3cbe1a522085"} Dec 03 09:13:56 crc kubenswrapper[4576]: I1203 09:13:56.280777 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-p9nqp" event={"ID":"9878d95a-72e1-456b-aee3-f0bf43432249","Type":"ContainerStarted","Data":"224e9b277c7f51f16558f795a6c8b95f9336adb8a6715337ed270828c032601a"} Dec 03 09:13:56 crc kubenswrapper[4576]: I1203 09:13:56.280825 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-p9nqp" event={"ID":"9878d95a-72e1-456b-aee3-f0bf43432249","Type":"ContainerStarted","Data":"1e570089e88901cf8a317e8baefc9f8ddbc462f5d8a7be4a676733cd61c0dd55"} Dec 03 09:13:56 crc kubenswrapper[4576]: I1203 09:13:56.293328 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-ddfc-account-create-update-55b6r" event={"ID":"66f7791e-f5ad-4d75-b250-d584031bc805","Type":"ContainerStarted","Data":"24e4d3f1f0dfa4feb110578ac887090fc07b5d05b397cb3b26a34e637bbfb8a9"} Dec 03 09:13:56 crc kubenswrapper[4576]: I1203 09:13:56.315176 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-8hs7j" event={"ID":"50764171-616e-4be8-b380-a5923d51773a","Type":"ContainerStarted","Data":"7c71269ee9bf011cb643c487f7a03babe292deef523a9a0c9342dfd761f803a8"} Dec 03 09:13:56 crc kubenswrapper[4576]: I1203 09:13:56.330824 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-1cc4-account-create-update-ltg7z" event={"ID":"c224894c-4c8f-488a-a01e-69e2f721f329","Type":"ContainerStarted","Data":"5e8878ae52c87ff85fa2a4e334e3926cc2176022c259547469e38ce8cb6d0914"} Dec 03 09:13:56 crc kubenswrapper[4576]: I1203 09:13:56.348908 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=4.19915275 podStartE2EDuration="9.348887917s" podCreationTimestamp="2025-12-03 09:13:47 +0000 UTC" firstStartedPulling="2025-12-03 09:13:48.94099605 +0000 UTC m=+2036.326973034" lastFinishedPulling="2025-12-03 09:13:54.090731217 +0000 UTC m=+2041.476708201" observedRunningTime="2025-12-03 09:13:56.292830817 +0000 UTC m=+2043.678807801" watchObservedRunningTime="2025-12-03 09:13:56.348887917 +0000 UTC m=+2043.734864901" Dec 03 09:13:56 crc kubenswrapper[4576]: I1203 09:13:56.367072 4576 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack/nova-api-db-create-s4j5r" podStartSLOduration=3.367052503 podStartE2EDuration="3.367052503s" podCreationTimestamp="2025-12-03 09:13:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:13:56.328134071 +0000 UTC m=+2043.714111075" watchObservedRunningTime="2025-12-03 09:13:56.367052503 +0000 UTC m=+2043.753029487" Dec 03 09:13:56 crc kubenswrapper[4576]: I1203 09:13:56.421837 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-db-create-p9nqp" podStartSLOduration=3.421815608 podStartE2EDuration="3.421815608s" podCreationTimestamp="2025-12-03 09:13:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:13:56.366582491 +0000 UTC m=+2043.752559475" watchObservedRunningTime="2025-12-03 09:13:56.421815608 +0000 UTC m=+2043.807792592" Dec 03 09:13:56 crc kubenswrapper[4576]: I1203 09:13:56.440018 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-db-create-8hs7j" podStartSLOduration=3.439996994 podStartE2EDuration="3.439996994s" podCreationTimestamp="2025-12-03 09:13:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:13:56.380638464 +0000 UTC m=+2043.766615458" watchObservedRunningTime="2025-12-03 09:13:56.439996994 +0000 UTC m=+2043.825973978" Dec 03 09:13:57 crc kubenswrapper[4576]: I1203 09:13:57.341749 4576 generic.go:334] "Generic (PLEG): container finished" podID="af864d88-4394-47c6-883a-85d47c2a6832" containerID="70b9250dc34f79d6e4d41960573875bf9e59c0987fa78766142d3cbe1a522085" exitCode=0 Dec 03 09:13:57 crc kubenswrapper[4576]: I1203 09:13:57.341824 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-s4j5r" event={"ID":"af864d88-4394-47c6-883a-85d47c2a6832","Type":"ContainerDied","Data":"70b9250dc34f79d6e4d41960573875bf9e59c0987fa78766142d3cbe1a522085"} Dec 03 09:13:57 crc kubenswrapper[4576]: I1203 09:13:57.344375 4576 generic.go:334] "Generic (PLEG): container finished" podID="9878d95a-72e1-456b-aee3-f0bf43432249" containerID="224e9b277c7f51f16558f795a6c8b95f9336adb8a6715337ed270828c032601a" exitCode=0 Dec 03 09:13:57 crc kubenswrapper[4576]: I1203 09:13:57.344441 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-p9nqp" event={"ID":"9878d95a-72e1-456b-aee3-f0bf43432249","Type":"ContainerDied","Data":"224e9b277c7f51f16558f795a6c8b95f9336adb8a6715337ed270828c032601a"} Dec 03 09:13:57 crc kubenswrapper[4576]: I1203 09:13:57.346465 4576 generic.go:334] "Generic (PLEG): container finished" podID="66f7791e-f5ad-4d75-b250-d584031bc805" containerID="b3b0dd228776a90a37239c3bb7ca32bf2ce3e8d8f05f4691318c7669c7c8ff4b" exitCode=0 Dec 03 09:13:57 crc kubenswrapper[4576]: I1203 09:13:57.346544 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-ddfc-account-create-update-55b6r" event={"ID":"66f7791e-f5ad-4d75-b250-d584031bc805","Type":"ContainerDied","Data":"b3b0dd228776a90a37239c3bb7ca32bf2ce3e8d8f05f4691318c7669c7c8ff4b"} Dec 03 09:13:57 crc kubenswrapper[4576]: I1203 09:13:57.348593 4576 generic.go:334] "Generic (PLEG): container finished" podID="50764171-616e-4be8-b380-a5923d51773a" containerID="7c71269ee9bf011cb643c487f7a03babe292deef523a9a0c9342dfd761f803a8" exitCode=0 Dec 03 
09:13:57 crc kubenswrapper[4576]: I1203 09:13:57.348636 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-8hs7j" event={"ID":"50764171-616e-4be8-b380-a5923d51773a","Type":"ContainerDied","Data":"7c71269ee9bf011cb643c487f7a03babe292deef523a9a0c9342dfd761f803a8"} Dec 03 09:13:57 crc kubenswrapper[4576]: I1203 09:13:57.352107 4576 generic.go:334] "Generic (PLEG): container finished" podID="c224894c-4c8f-488a-a01e-69e2f721f329" containerID="a52d278b4603a4a58597ce7e81dcac7013f927bfa9a6b2e1e418981934a4929a" exitCode=0 Dec 03 09:13:57 crc kubenswrapper[4576]: I1203 09:13:57.352238 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-1cc4-account-create-update-ltg7z" event={"ID":"c224894c-4c8f-488a-a01e-69e2f721f329","Type":"ContainerDied","Data":"a52d278b4603a4a58597ce7e81dcac7013f927bfa9a6b2e1e418981934a4929a"} Dec 03 09:13:57 crc kubenswrapper[4576]: I1203 09:13:57.356760 4576 generic.go:334] "Generic (PLEG): container finished" podID="f8ce720b-ea32-4d75-b6f7-7adf32a6ed6a" containerID="b45b3dd0b3196523db64d7cab40faddcb4ff25c93c2abad8317748f50d6d3f53" exitCode=0 Dec 03 09:13:57 crc kubenswrapper[4576]: I1203 09:13:57.358102 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-5ba3-account-create-update-hkhpv" event={"ID":"f8ce720b-ea32-4d75-b6f7-7adf32a6ed6a","Type":"ContainerDied","Data":"b45b3dd0b3196523db64d7cab40faddcb4ff25c93c2abad8317748f50d6d3f53"} Dec 03 09:13:57 crc kubenswrapper[4576]: I1203 09:13:57.610658 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-b677c5dc5-pfc4n" Dec 03 09:13:58 crc kubenswrapper[4576]: I1203 09:13:58.205589 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:13:58 crc kubenswrapper[4576]: I1203 09:13:58.365583 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="16d4fe18-1b2e-4fcd-bbff-228c62eda882" containerName="ceilometer-central-agent" containerID="cri-o://0591aed9d23ecf817ade50c383df2325ce5a50c9f766a4d85666cf60f9fdb024" gracePeriod=30 Dec 03 09:13:58 crc kubenswrapper[4576]: I1203 09:13:58.366141 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="16d4fe18-1b2e-4fcd-bbff-228c62eda882" containerName="proxy-httpd" containerID="cri-o://8db04dc6377036781d391edabbc632ad3f319e06d329b9a28f6dcb236fcd08d5" gracePeriod=30 Dec 03 09:13:58 crc kubenswrapper[4576]: I1203 09:13:58.366184 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="16d4fe18-1b2e-4fcd-bbff-228c62eda882" containerName="sg-core" containerID="cri-o://1282a19a6e6dc7e685230c54bfa2b9ec10f10492c1e76afe029cb6503e3c7008" gracePeriod=30 Dec 03 09:13:58 crc kubenswrapper[4576]: I1203 09:13:58.366215 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="16d4fe18-1b2e-4fcd-bbff-228c62eda882" containerName="ceilometer-notification-agent" containerID="cri-o://e78bc7ac09b109886b894a3f14f211502703e0b618cfa910defcb0c19b00bd2c" gracePeriod=30 Dec 03 09:13:58 crc kubenswrapper[4576]: E1203 09:13:58.758022 4576 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod16d4fe18_1b2e_4fcd_bbff_228c62eda882.slice/crio-conmon-1282a19a6e6dc7e685230c54bfa2b9ec10f10492c1e76afe029cb6503e3c7008.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod16d4fe18_1b2e_4fcd_bbff_228c62eda882.slice/crio-1282a19a6e6dc7e685230c54bfa2b9ec10f10492c1e76afe029cb6503e3c7008.scope\": RecentStats: unable to find data in memory cache]" Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.056483 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-ddfc-account-create-update-55b6r" Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.176169 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zjzh5\" (UniqueName: \"kubernetes.io/projected/66f7791e-f5ad-4d75-b250-d584031bc805-kube-api-access-zjzh5\") pod \"66f7791e-f5ad-4d75-b250-d584031bc805\" (UID: \"66f7791e-f5ad-4d75-b250-d584031bc805\") " Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.176443 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66f7791e-f5ad-4d75-b250-d584031bc805-operator-scripts\") pod \"66f7791e-f5ad-4d75-b250-d584031bc805\" (UID: \"66f7791e-f5ad-4d75-b250-d584031bc805\") " Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.177464 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66f7791e-f5ad-4d75-b250-d584031bc805-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "66f7791e-f5ad-4d75-b250-d584031bc805" (UID: "66f7791e-f5ad-4d75-b250-d584031bc805"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.213748 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66f7791e-f5ad-4d75-b250-d584031bc805-kube-api-access-zjzh5" (OuterVolumeSpecName: "kube-api-access-zjzh5") pod "66f7791e-f5ad-4d75-b250-d584031bc805" (UID: "66f7791e-f5ad-4d75-b250-d584031bc805"). InnerVolumeSpecName "kube-api-access-zjzh5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.278172 4576 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66f7791e-f5ad-4d75-b250-d584031bc805-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.278212 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zjzh5\" (UniqueName: \"kubernetes.io/projected/66f7791e-f5ad-4d75-b250-d584031bc805-kube-api-access-zjzh5\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.381687 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-1cc4-account-create-update-ltg7z" Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.418750 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-s4j5r" Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.421363 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-8hs7j" Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.421585 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-ddfc-account-create-update-55b6r" event={"ID":"66f7791e-f5ad-4d75-b250-d584031bc805","Type":"ContainerDied","Data":"24e4d3f1f0dfa4feb110578ac887090fc07b5d05b397cb3b26a34e637bbfb8a9"} Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.421612 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="24e4d3f1f0dfa4feb110578ac887090fc07b5d05b397cb3b26a34e637bbfb8a9" Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.421691 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-ddfc-account-create-update-55b6r" Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.424852 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-p9nqp" Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.424947 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-5ba3-account-create-update-hkhpv" Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.425736 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-1cc4-account-create-update-ltg7z" event={"ID":"c224894c-4c8f-488a-a01e-69e2f721f329","Type":"ContainerDied","Data":"5e8878ae52c87ff85fa2a4e334e3926cc2176022c259547469e38ce8cb6d0914"} Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.425760 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5e8878ae52c87ff85fa2a4e334e3926cc2176022c259547469e38ce8cb6d0914" Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.425803 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-1cc4-account-create-update-ltg7z" Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.431040 4576 generic.go:334] "Generic (PLEG): container finished" podID="16d4fe18-1b2e-4fcd-bbff-228c62eda882" containerID="8db04dc6377036781d391edabbc632ad3f319e06d329b9a28f6dcb236fcd08d5" exitCode=0 Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.431064 4576 generic.go:334] "Generic (PLEG): container finished" podID="16d4fe18-1b2e-4fcd-bbff-228c62eda882" containerID="1282a19a6e6dc7e685230c54bfa2b9ec10f10492c1e76afe029cb6503e3c7008" exitCode=2 Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.431072 4576 generic.go:334] "Generic (PLEG): container finished" podID="16d4fe18-1b2e-4fcd-bbff-228c62eda882" containerID="e78bc7ac09b109886b894a3f14f211502703e0b618cfa910defcb0c19b00bd2c" exitCode=0 Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.431096 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"16d4fe18-1b2e-4fcd-bbff-228c62eda882","Type":"ContainerDied","Data":"8db04dc6377036781d391edabbc632ad3f319e06d329b9a28f6dcb236fcd08d5"} Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.431119 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"16d4fe18-1b2e-4fcd-bbff-228c62eda882","Type":"ContainerDied","Data":"1282a19a6e6dc7e685230c54bfa2b9ec10f10492c1e76afe029cb6503e3c7008"} Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.431130 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"16d4fe18-1b2e-4fcd-bbff-228c62eda882","Type":"ContainerDied","Data":"e78bc7ac09b109886b894a3f14f211502703e0b618cfa910defcb0c19b00bd2c"} Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.484196 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c224894c-4c8f-488a-a01e-69e2f721f329-operator-scripts\") pod \"c224894c-4c8f-488a-a01e-69e2f721f329\" (UID: \"c224894c-4c8f-488a-a01e-69e2f721f329\") " Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.484259 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af864d88-4394-47c6-883a-85d47c2a6832-operator-scripts\") pod \"af864d88-4394-47c6-883a-85d47c2a6832\" (UID: \"af864d88-4394-47c6-883a-85d47c2a6832\") " Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.484296 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9878d95a-72e1-456b-aee3-f0bf43432249-operator-scripts\") pod \"9878d95a-72e1-456b-aee3-f0bf43432249\" (UID: \"9878d95a-72e1-456b-aee3-f0bf43432249\") " Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.484362 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-56rzd\" (UniqueName: \"kubernetes.io/projected/af864d88-4394-47c6-883a-85d47c2a6832-kube-api-access-56rzd\") pod \"af864d88-4394-47c6-883a-85d47c2a6832\" (UID: \"af864d88-4394-47c6-883a-85d47c2a6832\") " Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.484479 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2dds4\" (UniqueName: \"kubernetes.io/projected/f8ce720b-ea32-4d75-b6f7-7adf32a6ed6a-kube-api-access-2dds4\") pod \"f8ce720b-ea32-4d75-b6f7-7adf32a6ed6a\" (UID: \"f8ce720b-ea32-4d75-b6f7-7adf32a6ed6a\") " Dec 
03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.484553 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/50764171-616e-4be8-b380-a5923d51773a-operator-scripts\") pod \"50764171-616e-4be8-b380-a5923d51773a\" (UID: \"50764171-616e-4be8-b380-a5923d51773a\") " Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.484587 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n4gn2\" (UniqueName: \"kubernetes.io/projected/c224894c-4c8f-488a-a01e-69e2f721f329-kube-api-access-n4gn2\") pod \"c224894c-4c8f-488a-a01e-69e2f721f329\" (UID: \"c224894c-4c8f-488a-a01e-69e2f721f329\") " Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.484622 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dqvxf\" (UniqueName: \"kubernetes.io/projected/9878d95a-72e1-456b-aee3-f0bf43432249-kube-api-access-dqvxf\") pod \"9878d95a-72e1-456b-aee3-f0bf43432249\" (UID: \"9878d95a-72e1-456b-aee3-f0bf43432249\") " Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.484652 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f8ce720b-ea32-4d75-b6f7-7adf32a6ed6a-operator-scripts\") pod \"f8ce720b-ea32-4d75-b6f7-7adf32a6ed6a\" (UID: \"f8ce720b-ea32-4d75-b6f7-7adf32a6ed6a\") " Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.484677 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qw44k\" (UniqueName: \"kubernetes.io/projected/50764171-616e-4be8-b380-a5923d51773a-kube-api-access-qw44k\") pod \"50764171-616e-4be8-b380-a5923d51773a\" (UID: \"50764171-616e-4be8-b380-a5923d51773a\") " Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.489442 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9878d95a-72e1-456b-aee3-f0bf43432249-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9878d95a-72e1-456b-aee3-f0bf43432249" (UID: "9878d95a-72e1-456b-aee3-f0bf43432249"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.489480 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c224894c-4c8f-488a-a01e-69e2f721f329-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c224894c-4c8f-488a-a01e-69e2f721f329" (UID: "c224894c-4c8f-488a-a01e-69e2f721f329"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.490160 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/50764171-616e-4be8-b380-a5923d51773a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "50764171-616e-4be8-b380-a5923d51773a" (UID: "50764171-616e-4be8-b380-a5923d51773a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.490690 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af864d88-4394-47c6-883a-85d47c2a6832-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "af864d88-4394-47c6-883a-85d47c2a6832" (UID: "af864d88-4394-47c6-883a-85d47c2a6832"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.491079 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f8ce720b-ea32-4d75-b6f7-7adf32a6ed6a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f8ce720b-ea32-4d75-b6f7-7adf32a6ed6a" (UID: "f8ce720b-ea32-4d75-b6f7-7adf32a6ed6a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.491195 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8ce720b-ea32-4d75-b6f7-7adf32a6ed6a-kube-api-access-2dds4" (OuterVolumeSpecName: "kube-api-access-2dds4") pod "f8ce720b-ea32-4d75-b6f7-7adf32a6ed6a" (UID: "f8ce720b-ea32-4d75-b6f7-7adf32a6ed6a"). InnerVolumeSpecName "kube-api-access-2dds4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.502782 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50764171-616e-4be8-b380-a5923d51773a-kube-api-access-qw44k" (OuterVolumeSpecName: "kube-api-access-qw44k") pod "50764171-616e-4be8-b380-a5923d51773a" (UID: "50764171-616e-4be8-b380-a5923d51773a"). InnerVolumeSpecName "kube-api-access-qw44k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.503152 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9878d95a-72e1-456b-aee3-f0bf43432249-kube-api-access-dqvxf" (OuterVolumeSpecName: "kube-api-access-dqvxf") pod "9878d95a-72e1-456b-aee3-f0bf43432249" (UID: "9878d95a-72e1-456b-aee3-f0bf43432249"). InnerVolumeSpecName "kube-api-access-dqvxf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.504634 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af864d88-4394-47c6-883a-85d47c2a6832-kube-api-access-56rzd" (OuterVolumeSpecName: "kube-api-access-56rzd") pod "af864d88-4394-47c6-883a-85d47c2a6832" (UID: "af864d88-4394-47c6-883a-85d47c2a6832"). InnerVolumeSpecName "kube-api-access-56rzd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.510974 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c224894c-4c8f-488a-a01e-69e2f721f329-kube-api-access-n4gn2" (OuterVolumeSpecName: "kube-api-access-n4gn2") pod "c224894c-4c8f-488a-a01e-69e2f721f329" (UID: "c224894c-4c8f-488a-a01e-69e2f721f329"). InnerVolumeSpecName "kube-api-access-n4gn2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.590790 4576 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/50764171-616e-4be8-b380-a5923d51773a-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.590833 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n4gn2\" (UniqueName: \"kubernetes.io/projected/c224894c-4c8f-488a-a01e-69e2f721f329-kube-api-access-n4gn2\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.590844 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dqvxf\" (UniqueName: \"kubernetes.io/projected/9878d95a-72e1-456b-aee3-f0bf43432249-kube-api-access-dqvxf\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.590853 4576 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f8ce720b-ea32-4d75-b6f7-7adf32a6ed6a-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.590862 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qw44k\" (UniqueName: \"kubernetes.io/projected/50764171-616e-4be8-b380-a5923d51773a-kube-api-access-qw44k\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.590872 4576 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c224894c-4c8f-488a-a01e-69e2f721f329-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.590881 4576 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af864d88-4394-47c6-883a-85d47c2a6832-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.590891 4576 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9878d95a-72e1-456b-aee3-f0bf43432249-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.592001 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-56rzd\" (UniqueName: \"kubernetes.io/projected/af864d88-4394-47c6-883a-85d47c2a6832-kube-api-access-56rzd\") on node \"crc\" DevicePath \"\"" Dec 03 09:13:59 crc kubenswrapper[4576]: I1203 09:13:59.592025 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2dds4\" (UniqueName: \"kubernetes.io/projected/f8ce720b-ea32-4d75-b6f7-7adf32a6ed6a-kube-api-access-2dds4\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:00 crc kubenswrapper[4576]: I1203 09:14:00.442948 4576 generic.go:334] "Generic (PLEG): container finished" podID="16d4fe18-1b2e-4fcd-bbff-228c62eda882" containerID="0591aed9d23ecf817ade50c383df2325ce5a50c9f766a4d85666cf60f9fdb024" exitCode=0 Dec 03 09:14:00 crc kubenswrapper[4576]: I1203 09:14:00.443103 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"16d4fe18-1b2e-4fcd-bbff-228c62eda882","Type":"ContainerDied","Data":"0591aed9d23ecf817ade50c383df2325ce5a50c9f766a4d85666cf60f9fdb024"} Dec 03 09:14:00 crc kubenswrapper[4576]: I1203 09:14:00.444815 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-s4j5r" 
event={"ID":"af864d88-4394-47c6-883a-85d47c2a6832","Type":"ContainerDied","Data":"0b9b4e2fb672e088dfc9622960d4e1fdee42ff5509306d687ca5c4c5be2a0c07"} Dec 03 09:14:00 crc kubenswrapper[4576]: I1203 09:14:00.444849 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0b9b4e2fb672e088dfc9622960d4e1fdee42ff5509306d687ca5c4c5be2a0c07" Dec 03 09:14:00 crc kubenswrapper[4576]: I1203 09:14:00.444914 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-s4j5r" Dec 03 09:14:00 crc kubenswrapper[4576]: I1203 09:14:00.446903 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-p9nqp" event={"ID":"9878d95a-72e1-456b-aee3-f0bf43432249","Type":"ContainerDied","Data":"1e570089e88901cf8a317e8baefc9f8ddbc462f5d8a7be4a676733cd61c0dd55"} Dec 03 09:14:00 crc kubenswrapper[4576]: I1203 09:14:00.446925 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1e570089e88901cf8a317e8baefc9f8ddbc462f5d8a7be4a676733cd61c0dd55" Dec 03 09:14:00 crc kubenswrapper[4576]: I1203 09:14:00.446968 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-p9nqp" Dec 03 09:14:00 crc kubenswrapper[4576]: I1203 09:14:00.449092 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-8hs7j" event={"ID":"50764171-616e-4be8-b380-a5923d51773a","Type":"ContainerDied","Data":"ed25d0714a8cdee89f17e28f128807a6316ce6f7eafdc75eedcd788654f91392"} Dec 03 09:14:00 crc kubenswrapper[4576]: I1203 09:14:00.449116 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ed25d0714a8cdee89f17e28f128807a6316ce6f7eafdc75eedcd788654f91392" Dec 03 09:14:00 crc kubenswrapper[4576]: I1203 09:14:00.449156 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-8hs7j" Dec 03 09:14:00 crc kubenswrapper[4576]: I1203 09:14:00.466754 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-5ba3-account-create-update-hkhpv" event={"ID":"f8ce720b-ea32-4d75-b6f7-7adf32a6ed6a","Type":"ContainerDied","Data":"dd04b41242c375a6db1210795df744a09a5ce8cfdf868ba78a25729789d7fddc"} Dec 03 09:14:00 crc kubenswrapper[4576]: I1203 09:14:00.466793 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dd04b41242c375a6db1210795df744a09a5ce8cfdf868ba78a25729789d7fddc" Dec 03 09:14:00 crc kubenswrapper[4576]: I1203 09:14:00.466848 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-5ba3-account-create-update-hkhpv" Dec 03 09:14:00 crc kubenswrapper[4576]: I1203 09:14:00.637430 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 09:14:00 crc kubenswrapper[4576]: I1203 09:14:00.637677 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="8e3d72d9-073c-46b3-826c-36b249b45fbf" containerName="glance-log" containerID="cri-o://0bd15b9bed9b0a2b59d6288cceb3854684ceb4ab5e4491cc7ffa3e50db208c8d" gracePeriod=30 Dec 03 09:14:00 crc kubenswrapper[4576]: I1203 09:14:00.638033 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="8e3d72d9-073c-46b3-826c-36b249b45fbf" containerName="glance-httpd" containerID="cri-o://c45cfa9a734db0f97e84842cff06e72809eceede8e001086b188dc77fd9d63b5" gracePeriod=30 Dec 03 09:14:00 crc kubenswrapper[4576]: I1203 09:14:00.709947 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 09:14:00 crc kubenswrapper[4576]: I1203 09:14:00.814719 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16d4fe18-1b2e-4fcd-bbff-228c62eda882-combined-ca-bundle\") pod \"16d4fe18-1b2e-4fcd-bbff-228c62eda882\" (UID: \"16d4fe18-1b2e-4fcd-bbff-228c62eda882\") " Dec 03 09:14:00 crc kubenswrapper[4576]: I1203 09:14:00.815061 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/16d4fe18-1b2e-4fcd-bbff-228c62eda882-run-httpd\") pod \"16d4fe18-1b2e-4fcd-bbff-228c62eda882\" (UID: \"16d4fe18-1b2e-4fcd-bbff-228c62eda882\") " Dec 03 09:14:00 crc kubenswrapper[4576]: I1203 09:14:00.815126 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16d4fe18-1b2e-4fcd-bbff-228c62eda882-scripts\") pod \"16d4fe18-1b2e-4fcd-bbff-228c62eda882\" (UID: \"16d4fe18-1b2e-4fcd-bbff-228c62eda882\") " Dec 03 09:14:00 crc kubenswrapper[4576]: I1203 09:14:00.815153 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/16d4fe18-1b2e-4fcd-bbff-228c62eda882-log-httpd\") pod \"16d4fe18-1b2e-4fcd-bbff-228c62eda882\" (UID: \"16d4fe18-1b2e-4fcd-bbff-228c62eda882\") " Dec 03 09:14:00 crc kubenswrapper[4576]: I1203 09:14:00.815190 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sgwgf\" (UniqueName: \"kubernetes.io/projected/16d4fe18-1b2e-4fcd-bbff-228c62eda882-kube-api-access-sgwgf\") pod \"16d4fe18-1b2e-4fcd-bbff-228c62eda882\" (UID: \"16d4fe18-1b2e-4fcd-bbff-228c62eda882\") " Dec 03 09:14:00 crc kubenswrapper[4576]: I1203 09:14:00.815213 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/16d4fe18-1b2e-4fcd-bbff-228c62eda882-sg-core-conf-yaml\") pod \"16d4fe18-1b2e-4fcd-bbff-228c62eda882\" (UID: \"16d4fe18-1b2e-4fcd-bbff-228c62eda882\") " Dec 03 09:14:00 crc kubenswrapper[4576]: I1203 09:14:00.815295 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16d4fe18-1b2e-4fcd-bbff-228c62eda882-config-data\") pod \"16d4fe18-1b2e-4fcd-bbff-228c62eda882\" (UID: 
\"16d4fe18-1b2e-4fcd-bbff-228c62eda882\") " Dec 03 09:14:00 crc kubenswrapper[4576]: I1203 09:14:00.817874 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/16d4fe18-1b2e-4fcd-bbff-228c62eda882-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "16d4fe18-1b2e-4fcd-bbff-228c62eda882" (UID: "16d4fe18-1b2e-4fcd-bbff-228c62eda882"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:14:00 crc kubenswrapper[4576]: I1203 09:14:00.821973 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16d4fe18-1b2e-4fcd-bbff-228c62eda882-scripts" (OuterVolumeSpecName: "scripts") pod "16d4fe18-1b2e-4fcd-bbff-228c62eda882" (UID: "16d4fe18-1b2e-4fcd-bbff-228c62eda882"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:14:00 crc kubenswrapper[4576]: I1203 09:14:00.822232 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/16d4fe18-1b2e-4fcd-bbff-228c62eda882-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "16d4fe18-1b2e-4fcd-bbff-228c62eda882" (UID: "16d4fe18-1b2e-4fcd-bbff-228c62eda882"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:14:00 crc kubenswrapper[4576]: I1203 09:14:00.835440 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16d4fe18-1b2e-4fcd-bbff-228c62eda882-kube-api-access-sgwgf" (OuterVolumeSpecName: "kube-api-access-sgwgf") pod "16d4fe18-1b2e-4fcd-bbff-228c62eda882" (UID: "16d4fe18-1b2e-4fcd-bbff-228c62eda882"). InnerVolumeSpecName "kube-api-access-sgwgf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:14:00 crc kubenswrapper[4576]: I1203 09:14:00.884693 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16d4fe18-1b2e-4fcd-bbff-228c62eda882-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "16d4fe18-1b2e-4fcd-bbff-228c62eda882" (UID: "16d4fe18-1b2e-4fcd-bbff-228c62eda882"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:14:00 crc kubenswrapper[4576]: I1203 09:14:00.921861 4576 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/16d4fe18-1b2e-4fcd-bbff-228c62eda882-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:00 crc kubenswrapper[4576]: I1203 09:14:00.923438 4576 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16d4fe18-1b2e-4fcd-bbff-228c62eda882-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:00 crc kubenswrapper[4576]: I1203 09:14:00.923564 4576 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/16d4fe18-1b2e-4fcd-bbff-228c62eda882-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:00 crc kubenswrapper[4576]: I1203 09:14:00.923666 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sgwgf\" (UniqueName: \"kubernetes.io/projected/16d4fe18-1b2e-4fcd-bbff-228c62eda882-kube-api-access-sgwgf\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:00 crc kubenswrapper[4576]: I1203 09:14:00.923775 4576 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/16d4fe18-1b2e-4fcd-bbff-228c62eda882-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:00 crc kubenswrapper[4576]: I1203 09:14:00.994631 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16d4fe18-1b2e-4fcd-bbff-228c62eda882-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "16d4fe18-1b2e-4fcd-bbff-228c62eda882" (UID: "16d4fe18-1b2e-4fcd-bbff-228c62eda882"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.030649 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16d4fe18-1b2e-4fcd-bbff-228c62eda882-config-data" (OuterVolumeSpecName: "config-data") pod "16d4fe18-1b2e-4fcd-bbff-228c62eda882" (UID: "16d4fe18-1b2e-4fcd-bbff-228c62eda882"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.033838 4576 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16d4fe18-1b2e-4fcd-bbff-228c62eda882-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.033876 4576 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16d4fe18-1b2e-4fcd-bbff-228c62eda882-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.201429 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-667d896bdd-mtmgs" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.244951 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2e5e255c-5481-4fc1-937f-53014b9b1da3-config\") pod \"2e5e255c-5481-4fc1-937f-53014b9b1da3\" (UID: \"2e5e255c-5481-4fc1-937f-53014b9b1da3\") " Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.245019 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2e5e255c-5481-4fc1-937f-53014b9b1da3-httpd-config\") pod \"2e5e255c-5481-4fc1-937f-53014b9b1da3\" (UID: \"2e5e255c-5481-4fc1-937f-53014b9b1da3\") " Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.245475 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rm6gc\" (UniqueName: \"kubernetes.io/projected/2e5e255c-5481-4fc1-937f-53014b9b1da3-kube-api-access-rm6gc\") pod \"2e5e255c-5481-4fc1-937f-53014b9b1da3\" (UID: \"2e5e255c-5481-4fc1-937f-53014b9b1da3\") " Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.245511 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2e5e255c-5481-4fc1-937f-53014b9b1da3-ovndb-tls-certs\") pod \"2e5e255c-5481-4fc1-937f-53014b9b1da3\" (UID: \"2e5e255c-5481-4fc1-937f-53014b9b1da3\") " Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.245571 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e5e255c-5481-4fc1-937f-53014b9b1da3-combined-ca-bundle\") pod \"2e5e255c-5481-4fc1-937f-53014b9b1da3\" (UID: \"2e5e255c-5481-4fc1-937f-53014b9b1da3\") " Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.268046 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e5e255c-5481-4fc1-937f-53014b9b1da3-kube-api-access-rm6gc" (OuterVolumeSpecName: "kube-api-access-rm6gc") pod "2e5e255c-5481-4fc1-937f-53014b9b1da3" (UID: "2e5e255c-5481-4fc1-937f-53014b9b1da3"). InnerVolumeSpecName "kube-api-access-rm6gc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.273432 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e5e255c-5481-4fc1-937f-53014b9b1da3-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "2e5e255c-5481-4fc1-937f-53014b9b1da3" (UID: "2e5e255c-5481-4fc1-937f-53014b9b1da3"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.342899 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e5e255c-5481-4fc1-937f-53014b9b1da3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2e5e255c-5481-4fc1-937f-53014b9b1da3" (UID: "2e5e255c-5481-4fc1-937f-53014b9b1da3"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.348576 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rm6gc\" (UniqueName: \"kubernetes.io/projected/2e5e255c-5481-4fc1-937f-53014b9b1da3-kube-api-access-rm6gc\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.348598 4576 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e5e255c-5481-4fc1-937f-53014b9b1da3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.348626 4576 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2e5e255c-5481-4fc1-937f-53014b9b1da3-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.372862 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e5e255c-5481-4fc1-937f-53014b9b1da3-config" (OuterVolumeSpecName: "config") pod "2e5e255c-5481-4fc1-937f-53014b9b1da3" (UID: "2e5e255c-5481-4fc1-937f-53014b9b1da3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.394789 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e5e255c-5481-4fc1-937f-53014b9b1da3-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "2e5e255c-5481-4fc1-937f-53014b9b1da3" (UID: "2e5e255c-5481-4fc1-937f-53014b9b1da3"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.449728 4576 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2e5e255c-5481-4fc1-937f-53014b9b1da3-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.449757 4576 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/2e5e255c-5481-4fc1-937f-53014b9b1da3-config\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.477443 4576 generic.go:334] "Generic (PLEG): container finished" podID="2e5e255c-5481-4fc1-937f-53014b9b1da3" containerID="301edefa9cd0abfc629e1af236fa272aad4c402e366458b862a267cf61069921" exitCode=0 Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.477508 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-667d896bdd-mtmgs" event={"ID":"2e5e255c-5481-4fc1-937f-53014b9b1da3","Type":"ContainerDied","Data":"301edefa9cd0abfc629e1af236fa272aad4c402e366458b862a267cf61069921"} Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.478203 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-667d896bdd-mtmgs" event={"ID":"2e5e255c-5481-4fc1-937f-53014b9b1da3","Type":"ContainerDied","Data":"eb2ce85096e5add7053d4c6a8b10d2b7de4597750b3b8241e8ffb3c63b93b2ff"} Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.477550 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-667d896bdd-mtmgs" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.478237 4576 scope.go:117] "RemoveContainer" containerID="ba163b07550f5a6b7db8c44eaf679328dae2ff9951b52f5efefc6dc4d42cd683" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.480948 4576 generic.go:334] "Generic (PLEG): container finished" podID="8e3d72d9-073c-46b3-826c-36b249b45fbf" containerID="0bd15b9bed9b0a2b59d6288cceb3854684ceb4ab5e4491cc7ffa3e50db208c8d" exitCode=143 Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.481018 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8e3d72d9-073c-46b3-826c-36b249b45fbf","Type":"ContainerDied","Data":"0bd15b9bed9b0a2b59d6288cceb3854684ceb4ab5e4491cc7ffa3e50db208c8d"} Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.487516 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"16d4fe18-1b2e-4fcd-bbff-228c62eda882","Type":"ContainerDied","Data":"5c1e28d85bd31260d8605efaa14cde04ce7f6ed97fa8c1d2e6ceefb6053e6fa4"} Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.487634 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.505555 4576 scope.go:117] "RemoveContainer" containerID="301edefa9cd0abfc629e1af236fa272aad4c402e366458b862a267cf61069921" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.538696 4576 scope.go:117] "RemoveContainer" containerID="ba163b07550f5a6b7db8c44eaf679328dae2ff9951b52f5efefc6dc4d42cd683" Dec 03 09:14:01 crc kubenswrapper[4576]: E1203 09:14:01.539662 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba163b07550f5a6b7db8c44eaf679328dae2ff9951b52f5efefc6dc4d42cd683\": container with ID starting with ba163b07550f5a6b7db8c44eaf679328dae2ff9951b52f5efefc6dc4d42cd683 not found: ID does not exist" containerID="ba163b07550f5a6b7db8c44eaf679328dae2ff9951b52f5efefc6dc4d42cd683" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.539734 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba163b07550f5a6b7db8c44eaf679328dae2ff9951b52f5efefc6dc4d42cd683"} err="failed to get container status \"ba163b07550f5a6b7db8c44eaf679328dae2ff9951b52f5efefc6dc4d42cd683\": rpc error: code = NotFound desc = could not find container \"ba163b07550f5a6b7db8c44eaf679328dae2ff9951b52f5efefc6dc4d42cd683\": container with ID starting with ba163b07550f5a6b7db8c44eaf679328dae2ff9951b52f5efefc6dc4d42cd683 not found: ID does not exist" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.539769 4576 scope.go:117] "RemoveContainer" containerID="301edefa9cd0abfc629e1af236fa272aad4c402e366458b862a267cf61069921" Dec 03 09:14:01 crc kubenswrapper[4576]: E1203 09:14:01.540871 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"301edefa9cd0abfc629e1af236fa272aad4c402e366458b862a267cf61069921\": container with ID starting with 301edefa9cd0abfc629e1af236fa272aad4c402e366458b862a267cf61069921 not found: ID does not exist" containerID="301edefa9cd0abfc629e1af236fa272aad4c402e366458b862a267cf61069921" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.540916 4576 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"301edefa9cd0abfc629e1af236fa272aad4c402e366458b862a267cf61069921"} err="failed to get container status \"301edefa9cd0abfc629e1af236fa272aad4c402e366458b862a267cf61069921\": rpc error: code = NotFound desc = could not find container \"301edefa9cd0abfc629e1af236fa272aad4c402e366458b862a267cf61069921\": container with ID starting with 301edefa9cd0abfc629e1af236fa272aad4c402e366458b862a267cf61069921 not found: ID does not exist" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.540943 4576 scope.go:117] "RemoveContainer" containerID="8db04dc6377036781d391edabbc632ad3f319e06d329b9a28f6dcb236fcd08d5" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.541050 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-667d896bdd-mtmgs"] Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.548656 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-667d896bdd-mtmgs"] Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.559959 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.572732 4576 scope.go:117] "RemoveContainer" containerID="1282a19a6e6dc7e685230c54bfa2b9ec10f10492c1e76afe029cb6503e3c7008" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.573893 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.586971 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:14:01 crc kubenswrapper[4576]: E1203 09:14:01.587449 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16d4fe18-1b2e-4fcd-bbff-228c62eda882" containerName="ceilometer-central-agent" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.587470 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="16d4fe18-1b2e-4fcd-bbff-228c62eda882" containerName="ceilometer-central-agent" Dec 03 09:14:01 crc kubenswrapper[4576]: E1203 09:14:01.587486 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9878d95a-72e1-456b-aee3-f0bf43432249" containerName="mariadb-database-create" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.587495 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="9878d95a-72e1-456b-aee3-f0bf43432249" containerName="mariadb-database-create" Dec 03 09:14:01 crc kubenswrapper[4576]: E1203 09:14:01.587507 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e5e255c-5481-4fc1-937f-53014b9b1da3" containerName="neutron-api" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.587515 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e5e255c-5481-4fc1-937f-53014b9b1da3" containerName="neutron-api" Dec 03 09:14:01 crc kubenswrapper[4576]: E1203 09:14:01.587558 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c224894c-4c8f-488a-a01e-69e2f721f329" containerName="mariadb-account-create-update" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.587569 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="c224894c-4c8f-488a-a01e-69e2f721f329" containerName="mariadb-account-create-update" Dec 03 09:14:01 crc kubenswrapper[4576]: E1203 09:14:01.587586 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e5e255c-5481-4fc1-937f-53014b9b1da3" containerName="neutron-httpd" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.587595 4576 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="2e5e255c-5481-4fc1-937f-53014b9b1da3" containerName="neutron-httpd" Dec 03 09:14:01 crc kubenswrapper[4576]: E1203 09:14:01.587609 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50764171-616e-4be8-b380-a5923d51773a" containerName="mariadb-database-create" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.587617 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="50764171-616e-4be8-b380-a5923d51773a" containerName="mariadb-database-create" Dec 03 09:14:01 crc kubenswrapper[4576]: E1203 09:14:01.587654 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16d4fe18-1b2e-4fcd-bbff-228c62eda882" containerName="proxy-httpd" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.587662 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="16d4fe18-1b2e-4fcd-bbff-228c62eda882" containerName="proxy-httpd" Dec 03 09:14:01 crc kubenswrapper[4576]: E1203 09:14:01.587673 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16d4fe18-1b2e-4fcd-bbff-228c62eda882" containerName="ceilometer-notification-agent" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.587681 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="16d4fe18-1b2e-4fcd-bbff-228c62eda882" containerName="ceilometer-notification-agent" Dec 03 09:14:01 crc kubenswrapper[4576]: E1203 09:14:01.587694 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af864d88-4394-47c6-883a-85d47c2a6832" containerName="mariadb-database-create" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.587702 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="af864d88-4394-47c6-883a-85d47c2a6832" containerName="mariadb-database-create" Dec 03 09:14:01 crc kubenswrapper[4576]: E1203 09:14:01.587716 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66f7791e-f5ad-4d75-b250-d584031bc805" containerName="mariadb-account-create-update" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.587724 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="66f7791e-f5ad-4d75-b250-d584031bc805" containerName="mariadb-account-create-update" Dec 03 09:14:01 crc kubenswrapper[4576]: E1203 09:14:01.587753 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16d4fe18-1b2e-4fcd-bbff-228c62eda882" containerName="sg-core" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.587761 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="16d4fe18-1b2e-4fcd-bbff-228c62eda882" containerName="sg-core" Dec 03 09:14:01 crc kubenswrapper[4576]: E1203 09:14:01.587785 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8ce720b-ea32-4d75-b6f7-7adf32a6ed6a" containerName="mariadb-account-create-update" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.587793 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8ce720b-ea32-4d75-b6f7-7adf32a6ed6a" containerName="mariadb-account-create-update" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.588001 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="16d4fe18-1b2e-4fcd-bbff-228c62eda882" containerName="ceilometer-central-agent" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.588014 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="50764171-616e-4be8-b380-a5923d51773a" containerName="mariadb-database-create" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.588023 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="16d4fe18-1b2e-4fcd-bbff-228c62eda882" 
containerName="ceilometer-notification-agent" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.588036 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="9878d95a-72e1-456b-aee3-f0bf43432249" containerName="mariadb-database-create" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.588045 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="16d4fe18-1b2e-4fcd-bbff-228c62eda882" containerName="proxy-httpd" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.588059 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e5e255c-5481-4fc1-937f-53014b9b1da3" containerName="neutron-api" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.588082 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="16d4fe18-1b2e-4fcd-bbff-228c62eda882" containerName="sg-core" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.588094 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8ce720b-ea32-4d75-b6f7-7adf32a6ed6a" containerName="mariadb-account-create-update" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.588103 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e5e255c-5481-4fc1-937f-53014b9b1da3" containerName="neutron-httpd" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.588116 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="c224894c-4c8f-488a-a01e-69e2f721f329" containerName="mariadb-account-create-update" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.588123 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="66f7791e-f5ad-4d75-b250-d584031bc805" containerName="mariadb-account-create-update" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.588136 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="af864d88-4394-47c6-883a-85d47c2a6832" containerName="mariadb-database-create" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.590139 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.593922 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.594126 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.597840 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.653725 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42800208-9362-4971-990f-5ed4cac9f500-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"42800208-9362-4971-990f-5ed4cac9f500\") " pod="openstack/ceilometer-0" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.653777 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/42800208-9362-4971-990f-5ed4cac9f500-run-httpd\") pod \"ceilometer-0\" (UID: \"42800208-9362-4971-990f-5ed4cac9f500\") " pod="openstack/ceilometer-0" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.653819 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/42800208-9362-4971-990f-5ed4cac9f500-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"42800208-9362-4971-990f-5ed4cac9f500\") " pod="openstack/ceilometer-0" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.653826 4576 scope.go:117] "RemoveContainer" containerID="e78bc7ac09b109886b894a3f14f211502703e0b618cfa910defcb0c19b00bd2c" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.653883 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/42800208-9362-4971-990f-5ed4cac9f500-config-data\") pod \"ceilometer-0\" (UID: \"42800208-9362-4971-990f-5ed4cac9f500\") " pod="openstack/ceilometer-0" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.653948 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/42800208-9362-4971-990f-5ed4cac9f500-scripts\") pod \"ceilometer-0\" (UID: \"42800208-9362-4971-990f-5ed4cac9f500\") " pod="openstack/ceilometer-0" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.653969 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l58qj\" (UniqueName: \"kubernetes.io/projected/42800208-9362-4971-990f-5ed4cac9f500-kube-api-access-l58qj\") pod \"ceilometer-0\" (UID: \"42800208-9362-4971-990f-5ed4cac9f500\") " pod="openstack/ceilometer-0" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.654052 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/42800208-9362-4971-990f-5ed4cac9f500-log-httpd\") pod \"ceilometer-0\" (UID: \"42800208-9362-4971-990f-5ed4cac9f500\") " pod="openstack/ceilometer-0" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.705790 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="16d4fe18-1b2e-4fcd-bbff-228c62eda882" 
path="/var/lib/kubelet/pods/16d4fe18-1b2e-4fcd-bbff-228c62eda882/volumes" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.707093 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2e5e255c-5481-4fc1-937f-53014b9b1da3" path="/var/lib/kubelet/pods/2e5e255c-5481-4fc1-937f-53014b9b1da3/volumes" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.714432 4576 scope.go:117] "RemoveContainer" containerID="0591aed9d23ecf817ade50c383df2325ce5a50c9f766a4d85666cf60f9fdb024" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.748619 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-5d9b9454d4-cbqlk" podUID="83ab6db2-7b9e-4161-a064-56fe67986825" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.144:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.144:8443: connect: connection refused" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.756580 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/42800208-9362-4971-990f-5ed4cac9f500-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"42800208-9362-4971-990f-5ed4cac9f500\") " pod="openstack/ceilometer-0" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.756703 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/42800208-9362-4971-990f-5ed4cac9f500-config-data\") pod \"ceilometer-0\" (UID: \"42800208-9362-4971-990f-5ed4cac9f500\") " pod="openstack/ceilometer-0" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.756831 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/42800208-9362-4971-990f-5ed4cac9f500-scripts\") pod \"ceilometer-0\" (UID: \"42800208-9362-4971-990f-5ed4cac9f500\") " pod="openstack/ceilometer-0" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.756879 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l58qj\" (UniqueName: \"kubernetes.io/projected/42800208-9362-4971-990f-5ed4cac9f500-kube-api-access-l58qj\") pod \"ceilometer-0\" (UID: \"42800208-9362-4971-990f-5ed4cac9f500\") " pod="openstack/ceilometer-0" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.756906 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/42800208-9362-4971-990f-5ed4cac9f500-log-httpd\") pod \"ceilometer-0\" (UID: \"42800208-9362-4971-990f-5ed4cac9f500\") " pod="openstack/ceilometer-0" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.756931 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42800208-9362-4971-990f-5ed4cac9f500-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"42800208-9362-4971-990f-5ed4cac9f500\") " pod="openstack/ceilometer-0" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.756959 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/42800208-9362-4971-990f-5ed4cac9f500-run-httpd\") pod \"ceilometer-0\" (UID: \"42800208-9362-4971-990f-5ed4cac9f500\") " pod="openstack/ceilometer-0" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.757445 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/42800208-9362-4971-990f-5ed4cac9f500-run-httpd\") pod \"ceilometer-0\" (UID: \"42800208-9362-4971-990f-5ed4cac9f500\") " pod="openstack/ceilometer-0" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.766951 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/42800208-9362-4971-990f-5ed4cac9f500-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"42800208-9362-4971-990f-5ed4cac9f500\") " pod="openstack/ceilometer-0" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.768444 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/42800208-9362-4971-990f-5ed4cac9f500-config-data\") pod \"ceilometer-0\" (UID: \"42800208-9362-4971-990f-5ed4cac9f500\") " pod="openstack/ceilometer-0" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.778900 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/42800208-9362-4971-990f-5ed4cac9f500-log-httpd\") pod \"ceilometer-0\" (UID: \"42800208-9362-4971-990f-5ed4cac9f500\") " pod="openstack/ceilometer-0" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.781124 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/42800208-9362-4971-990f-5ed4cac9f500-scripts\") pod \"ceilometer-0\" (UID: \"42800208-9362-4971-990f-5ed4cac9f500\") " pod="openstack/ceilometer-0" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.788204 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42800208-9362-4971-990f-5ed4cac9f500-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"42800208-9362-4971-990f-5ed4cac9f500\") " pod="openstack/ceilometer-0" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.790642 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l58qj\" (UniqueName: \"kubernetes.io/projected/42800208-9362-4971-990f-5ed4cac9f500-kube-api-access-l58qj\") pod \"ceilometer-0\" (UID: \"42800208-9362-4971-990f-5ed4cac9f500\") " pod="openstack/ceilometer-0" Dec 03 09:14:01 crc kubenswrapper[4576]: I1203 09:14:01.910717 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-6d649695d8-6rtxn" podUID="288ed488-5270-4966-b866-f9f015262989" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Dec 03 09:14:02 crc kubenswrapper[4576]: I1203 09:14:02.008651 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 09:14:02 crc kubenswrapper[4576]: I1203 09:14:02.356806 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cinder-api-0" podUID="9ee570a8-cd83-4db6-bffa-080a2dae8552" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.170:8776/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 09:14:02 crc kubenswrapper[4576]: I1203 09:14:02.609433 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:14:02 crc kubenswrapper[4576]: I1203 09:14:02.918477 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 03 09:14:03 crc kubenswrapper[4576]: I1203 09:14:03.520850 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"42800208-9362-4971-990f-5ed4cac9f500","Type":"ContainerStarted","Data":"5ee8c76da9e6b60df569d086c9165e01e84406a90478aee55adfc9ba76ff70ba"} Dec 03 09:14:03 crc kubenswrapper[4576]: I1203 09:14:03.521170 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"42800208-9362-4971-990f-5ed4cac9f500","Type":"ContainerStarted","Data":"4ba2b3155123339705eb03a68a2a68fb4cfde524da25a81f5f2905575d45618a"} Dec 03 09:14:03 crc kubenswrapper[4576]: I1203 09:14:03.530977 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"649a142a-4649-45fb-bdba-11fcc838bf97","Type":"ContainerStarted","Data":"34f408bb502aec42322d124bd9d42dd3bc9aa0a77f78fdebf97b96a81e7981bb"} Dec 03 09:14:03 crc kubenswrapper[4576]: I1203 09:14:03.555604 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=3.6462891109999997 podStartE2EDuration="39.555587033s" podCreationTimestamp="2025-12-03 09:13:24 +0000 UTC" firstStartedPulling="2025-12-03 09:13:26.364491839 +0000 UTC m=+2013.750468813" lastFinishedPulling="2025-12-03 09:14:02.273789751 +0000 UTC m=+2049.659766735" observedRunningTime="2025-12-03 09:14:03.554578956 +0000 UTC m=+2050.940555940" watchObservedRunningTime="2025-12-03 09:14:03.555587033 +0000 UTC m=+2050.941564017" Dec 03 09:14:03 crc kubenswrapper[4576]: I1203 09:14:03.622709 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 09:14:03 crc kubenswrapper[4576]: I1203 09:14:03.625075 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="06a5ab71-58c3-4345-b8ac-00d09a1205d6" containerName="glance-log" containerID="cri-o://993d7d5ae0d126496d669c638716a16f232631f0a0b7e46c5aed880460e30082" gracePeriod=30 Dec 03 09:14:03 crc kubenswrapper[4576]: I1203 09:14:03.625861 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="06a5ab71-58c3-4345-b8ac-00d09a1205d6" containerName="glance-httpd" containerID="cri-o://ec6fb260ace858ff3252caf06df438bd629d9b661dc002528047350d813cb3f2" gracePeriod=30 Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.297603 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-8v6tp"] Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.299396 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-8v6tp" Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.305735 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-knrl7" Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.305939 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.306067 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.317999 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-8v6tp"] Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.408025 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fad4d6c-1741-4e92-b5c1-26e939b500df-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-8v6tp\" (UID: \"4fad4d6c-1741-4e92-b5c1-26e939b500df\") " pod="openstack/nova-cell0-conductor-db-sync-8v6tp" Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.408180 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nnxdv\" (UniqueName: \"kubernetes.io/projected/4fad4d6c-1741-4e92-b5c1-26e939b500df-kube-api-access-nnxdv\") pod \"nova-cell0-conductor-db-sync-8v6tp\" (UID: \"4fad4d6c-1741-4e92-b5c1-26e939b500df\") " pod="openstack/nova-cell0-conductor-db-sync-8v6tp" Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.408271 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fad4d6c-1741-4e92-b5c1-26e939b500df-config-data\") pod \"nova-cell0-conductor-db-sync-8v6tp\" (UID: \"4fad4d6c-1741-4e92-b5c1-26e939b500df\") " pod="openstack/nova-cell0-conductor-db-sync-8v6tp" Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.408371 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4fad4d6c-1741-4e92-b5c1-26e939b500df-scripts\") pod \"nova-cell0-conductor-db-sync-8v6tp\" (UID: \"4fad4d6c-1741-4e92-b5c1-26e939b500df\") " pod="openstack/nova-cell0-conductor-db-sync-8v6tp" Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.509614 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nnxdv\" (UniqueName: \"kubernetes.io/projected/4fad4d6c-1741-4e92-b5c1-26e939b500df-kube-api-access-nnxdv\") pod \"nova-cell0-conductor-db-sync-8v6tp\" (UID: \"4fad4d6c-1741-4e92-b5c1-26e939b500df\") " pod="openstack/nova-cell0-conductor-db-sync-8v6tp" Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.509683 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fad4d6c-1741-4e92-b5c1-26e939b500df-config-data\") pod \"nova-cell0-conductor-db-sync-8v6tp\" (UID: \"4fad4d6c-1741-4e92-b5c1-26e939b500df\") " pod="openstack/nova-cell0-conductor-db-sync-8v6tp" Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.509743 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4fad4d6c-1741-4e92-b5c1-26e939b500df-scripts\") pod \"nova-cell0-conductor-db-sync-8v6tp\" (UID: 
\"4fad4d6c-1741-4e92-b5c1-26e939b500df\") " pod="openstack/nova-cell0-conductor-db-sync-8v6tp" Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.509773 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fad4d6c-1741-4e92-b5c1-26e939b500df-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-8v6tp\" (UID: \"4fad4d6c-1741-4e92-b5c1-26e939b500df\") " pod="openstack/nova-cell0-conductor-db-sync-8v6tp" Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.517714 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4fad4d6c-1741-4e92-b5c1-26e939b500df-scripts\") pod \"nova-cell0-conductor-db-sync-8v6tp\" (UID: \"4fad4d6c-1741-4e92-b5c1-26e939b500df\") " pod="openstack/nova-cell0-conductor-db-sync-8v6tp" Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.518181 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fad4d6c-1741-4e92-b5c1-26e939b500df-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-8v6tp\" (UID: \"4fad4d6c-1741-4e92-b5c1-26e939b500df\") " pod="openstack/nova-cell0-conductor-db-sync-8v6tp" Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.521772 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.524569 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fad4d6c-1741-4e92-b5c1-26e939b500df-config-data\") pod \"nova-cell0-conductor-db-sync-8v6tp\" (UID: \"4fad4d6c-1741-4e92-b5c1-26e939b500df\") " pod="openstack/nova-cell0-conductor-db-sync-8v6tp" Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.532430 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nnxdv\" (UniqueName: \"kubernetes.io/projected/4fad4d6c-1741-4e92-b5c1-26e939b500df-kube-api-access-nnxdv\") pod \"nova-cell0-conductor-db-sync-8v6tp\" (UID: \"4fad4d6c-1741-4e92-b5c1-26e939b500df\") " pod="openstack/nova-cell0-conductor-db-sync-8v6tp" Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.552229 4576 generic.go:334] "Generic (PLEG): container finished" podID="8e3d72d9-073c-46b3-826c-36b249b45fbf" containerID="c45cfa9a734db0f97e84842cff06e72809eceede8e001086b188dc77fd9d63b5" exitCode=0 Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.552293 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8e3d72d9-073c-46b3-826c-36b249b45fbf","Type":"ContainerDied","Data":"c45cfa9a734db0f97e84842cff06e72809eceede8e001086b188dc77fd9d63b5"} Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.552323 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8e3d72d9-073c-46b3-826c-36b249b45fbf","Type":"ContainerDied","Data":"2244e1007be836cf9e5eb42da9134fa430ef1a18e765c97ec6049bc613bbdf04"} Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.552342 4576 scope.go:117] "RemoveContainer" containerID="c45cfa9a734db0f97e84842cff06e72809eceede8e001086b188dc77fd9d63b5" Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.552491 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.576450 4576 generic.go:334] "Generic (PLEG): container finished" podID="06a5ab71-58c3-4345-b8ac-00d09a1205d6" containerID="993d7d5ae0d126496d669c638716a16f232631f0a0b7e46c5aed880460e30082" exitCode=143 Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.576568 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"06a5ab71-58c3-4345-b8ac-00d09a1205d6","Type":"ContainerDied","Data":"993d7d5ae0d126496d669c638716a16f232631f0a0b7e46c5aed880460e30082"} Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.589843 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"42800208-9362-4971-990f-5ed4cac9f500","Type":"ContainerStarted","Data":"2664c54938e0708189b78da9075c42218b209e9db6d3f975c8df2b41df3dd007"} Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.625447 4576 scope.go:117] "RemoveContainer" containerID="0bd15b9bed9b0a2b59d6288cceb3854684ceb4ab5e4491cc7ffa3e50db208c8d" Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.656385 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-8v6tp" Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.677576 4576 scope.go:117] "RemoveContainer" containerID="c45cfa9a734db0f97e84842cff06e72809eceede8e001086b188dc77fd9d63b5" Dec 03 09:14:04 crc kubenswrapper[4576]: E1203 09:14:04.678941 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c45cfa9a734db0f97e84842cff06e72809eceede8e001086b188dc77fd9d63b5\": container with ID starting with c45cfa9a734db0f97e84842cff06e72809eceede8e001086b188dc77fd9d63b5 not found: ID does not exist" containerID="c45cfa9a734db0f97e84842cff06e72809eceede8e001086b188dc77fd9d63b5" Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.678970 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c45cfa9a734db0f97e84842cff06e72809eceede8e001086b188dc77fd9d63b5"} err="failed to get container status \"c45cfa9a734db0f97e84842cff06e72809eceede8e001086b188dc77fd9d63b5\": rpc error: code = NotFound desc = could not find container \"c45cfa9a734db0f97e84842cff06e72809eceede8e001086b188dc77fd9d63b5\": container with ID starting with c45cfa9a734db0f97e84842cff06e72809eceede8e001086b188dc77fd9d63b5 not found: ID does not exist" Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.678989 4576 scope.go:117] "RemoveContainer" containerID="0bd15b9bed9b0a2b59d6288cceb3854684ceb4ab5e4491cc7ffa3e50db208c8d" Dec 03 09:14:04 crc kubenswrapper[4576]: E1203 09:14:04.682469 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0bd15b9bed9b0a2b59d6288cceb3854684ceb4ab5e4491cc7ffa3e50db208c8d\": container with ID starting with 0bd15b9bed9b0a2b59d6288cceb3854684ceb4ab5e4491cc7ffa3e50db208c8d not found: ID does not exist" containerID="0bd15b9bed9b0a2b59d6288cceb3854684ceb4ab5e4491cc7ffa3e50db208c8d" Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.682500 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0bd15b9bed9b0a2b59d6288cceb3854684ceb4ab5e4491cc7ffa3e50db208c8d"} err="failed to get container status \"0bd15b9bed9b0a2b59d6288cceb3854684ceb4ab5e4491cc7ffa3e50db208c8d\": rpc error: code = NotFound 
desc = could not find container \"0bd15b9bed9b0a2b59d6288cceb3854684ceb4ab5e4491cc7ffa3e50db208c8d\": container with ID starting with 0bd15b9bed9b0a2b59d6288cceb3854684ceb4ab5e4491cc7ffa3e50db208c8d not found: ID does not exist" Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.716091 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e3d72d9-073c-46b3-826c-36b249b45fbf-config-data\") pod \"8e3d72d9-073c-46b3-826c-36b249b45fbf\" (UID: \"8e3d72d9-073c-46b3-826c-36b249b45fbf\") " Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.716238 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e3d72d9-073c-46b3-826c-36b249b45fbf-combined-ca-bundle\") pod \"8e3d72d9-073c-46b3-826c-36b249b45fbf\" (UID: \"8e3d72d9-073c-46b3-826c-36b249b45fbf\") " Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.716262 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8e3d72d9-073c-46b3-826c-36b249b45fbf-public-tls-certs\") pod \"8e3d72d9-073c-46b3-826c-36b249b45fbf\" (UID: \"8e3d72d9-073c-46b3-826c-36b249b45fbf\") " Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.716296 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8e3d72d9-073c-46b3-826c-36b249b45fbf-logs\") pod \"8e3d72d9-073c-46b3-826c-36b249b45fbf\" (UID: \"8e3d72d9-073c-46b3-826c-36b249b45fbf\") " Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.716329 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8e3d72d9-073c-46b3-826c-36b249b45fbf-httpd-run\") pod \"8e3d72d9-073c-46b3-826c-36b249b45fbf\" (UID: \"8e3d72d9-073c-46b3-826c-36b249b45fbf\") " Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.716356 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8e3d72d9-073c-46b3-826c-36b249b45fbf-scripts\") pod \"8e3d72d9-073c-46b3-826c-36b249b45fbf\" (UID: \"8e3d72d9-073c-46b3-826c-36b249b45fbf\") " Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.716387 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"8e3d72d9-073c-46b3-826c-36b249b45fbf\" (UID: \"8e3d72d9-073c-46b3-826c-36b249b45fbf\") " Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.716439 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jfddc\" (UniqueName: \"kubernetes.io/projected/8e3d72d9-073c-46b3-826c-36b249b45fbf-kube-api-access-jfddc\") pod \"8e3d72d9-073c-46b3-826c-36b249b45fbf\" (UID: \"8e3d72d9-073c-46b3-826c-36b249b45fbf\") " Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.718030 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8e3d72d9-073c-46b3-826c-36b249b45fbf-logs" (OuterVolumeSpecName: "logs") pod "8e3d72d9-073c-46b3-826c-36b249b45fbf" (UID: "8e3d72d9-073c-46b3-826c-36b249b45fbf"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.720496 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8e3d72d9-073c-46b3-826c-36b249b45fbf-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "8e3d72d9-073c-46b3-826c-36b249b45fbf" (UID: "8e3d72d9-073c-46b3-826c-36b249b45fbf"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.726705 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e3d72d9-073c-46b3-826c-36b249b45fbf-kube-api-access-jfddc" (OuterVolumeSpecName: "kube-api-access-jfddc") pod "8e3d72d9-073c-46b3-826c-36b249b45fbf" (UID: "8e3d72d9-073c-46b3-826c-36b249b45fbf"). InnerVolumeSpecName "kube-api-access-jfddc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.743761 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance") pod "8e3d72d9-073c-46b3-826c-36b249b45fbf" (UID: "8e3d72d9-073c-46b3-826c-36b249b45fbf"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.743933 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e3d72d9-073c-46b3-826c-36b249b45fbf-scripts" (OuterVolumeSpecName: "scripts") pod "8e3d72d9-073c-46b3-826c-36b249b45fbf" (UID: "8e3d72d9-073c-46b3-826c-36b249b45fbf"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.772937 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e3d72d9-073c-46b3-826c-36b249b45fbf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8e3d72d9-073c-46b3-826c-36b249b45fbf" (UID: "8e3d72d9-073c-46b3-826c-36b249b45fbf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.811825 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e3d72d9-073c-46b3-826c-36b249b45fbf-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "8e3d72d9-073c-46b3-826c-36b249b45fbf" (UID: "8e3d72d9-073c-46b3-826c-36b249b45fbf"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.833404 4576 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e3d72d9-073c-46b3-826c-36b249b45fbf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.838982 4576 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8e3d72d9-073c-46b3-826c-36b249b45fbf-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.842572 4576 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8e3d72d9-073c-46b3-826c-36b249b45fbf-logs\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.842677 4576 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8e3d72d9-073c-46b3-826c-36b249b45fbf-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.842751 4576 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8e3d72d9-073c-46b3-826c-36b249b45fbf-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.842843 4576 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.842902 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jfddc\" (UniqueName: \"kubernetes.io/projected/8e3d72d9-073c-46b3-826c-36b249b45fbf-kube-api-access-jfddc\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.869305 4576 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.870354 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e3d72d9-073c-46b3-826c-36b249b45fbf-config-data" (OuterVolumeSpecName: "config-data") pod "8e3d72d9-073c-46b3-826c-36b249b45fbf" (UID: "8e3d72d9-073c-46b3-826c-36b249b45fbf"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.944948 4576 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:04 crc kubenswrapper[4576]: I1203 09:14:04.945286 4576 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e3d72d9-073c-46b3-826c-36b249b45fbf-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:05 crc kubenswrapper[4576]: W1203 09:14:05.119672 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4fad4d6c_1741_4e92_b5c1_26e939b500df.slice/crio-f5c68908e58da66a1b029340ce7b749a0f10d6d23684360fa34a8dd0167ede3d WatchSource:0}: Error finding container f5c68908e58da66a1b029340ce7b749a0f10d6d23684360fa34a8dd0167ede3d: Status 404 returned error can't find the container with id f5c68908e58da66a1b029340ce7b749a0f10d6d23684360fa34a8dd0167ede3d Dec 03 09:14:05 crc kubenswrapper[4576]: I1203 09:14:05.120181 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-8v6tp"] Dec 03 09:14:05 crc kubenswrapper[4576]: I1203 09:14:05.230322 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 09:14:05 crc kubenswrapper[4576]: I1203 09:14:05.238129 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 09:14:05 crc kubenswrapper[4576]: I1203 09:14:05.282773 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 09:14:05 crc kubenswrapper[4576]: E1203 09:14:05.283278 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e3d72d9-073c-46b3-826c-36b249b45fbf" containerName="glance-log" Dec 03 09:14:05 crc kubenswrapper[4576]: I1203 09:14:05.283350 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e3d72d9-073c-46b3-826c-36b249b45fbf" containerName="glance-log" Dec 03 09:14:05 crc kubenswrapper[4576]: E1203 09:14:05.283429 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e3d72d9-073c-46b3-826c-36b249b45fbf" containerName="glance-httpd" Dec 03 09:14:05 crc kubenswrapper[4576]: I1203 09:14:05.283481 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e3d72d9-073c-46b3-826c-36b249b45fbf" containerName="glance-httpd" Dec 03 09:14:05 crc kubenswrapper[4576]: I1203 09:14:05.283713 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e3d72d9-073c-46b3-826c-36b249b45fbf" containerName="glance-log" Dec 03 09:14:05 crc kubenswrapper[4576]: I1203 09:14:05.283800 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e3d72d9-073c-46b3-826c-36b249b45fbf" containerName="glance-httpd" Dec 03 09:14:05 crc kubenswrapper[4576]: I1203 09:14:05.284809 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 09:14:05 crc kubenswrapper[4576]: I1203 09:14:05.295315 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 09:14:05 crc kubenswrapper[4576]: I1203 09:14:05.299602 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 03 09:14:05 crc kubenswrapper[4576]: I1203 09:14:05.300106 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 03 09:14:05 crc kubenswrapper[4576]: I1203 09:14:05.363634 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ebb315c8-6124-44ed-8bb7-f82b228893e2-config-data\") pod \"glance-default-external-api-0\" (UID: \"ebb315c8-6124-44ed-8bb7-f82b228893e2\") " pod="openstack/glance-default-external-api-0" Dec 03 09:14:05 crc kubenswrapper[4576]: I1203 09:14:05.364038 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ebb315c8-6124-44ed-8bb7-f82b228893e2-logs\") pod \"glance-default-external-api-0\" (UID: \"ebb315c8-6124-44ed-8bb7-f82b228893e2\") " pod="openstack/glance-default-external-api-0" Dec 03 09:14:05 crc kubenswrapper[4576]: I1203 09:14:05.364076 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"ebb315c8-6124-44ed-8bb7-f82b228893e2\") " pod="openstack/glance-default-external-api-0" Dec 03 09:14:05 crc kubenswrapper[4576]: I1203 09:14:05.364147 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebb315c8-6124-44ed-8bb7-f82b228893e2-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ebb315c8-6124-44ed-8bb7-f82b228893e2\") " pod="openstack/glance-default-external-api-0" Dec 03 09:14:05 crc kubenswrapper[4576]: I1203 09:14:05.364239 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mh2vn\" (UniqueName: \"kubernetes.io/projected/ebb315c8-6124-44ed-8bb7-f82b228893e2-kube-api-access-mh2vn\") pod \"glance-default-external-api-0\" (UID: \"ebb315c8-6124-44ed-8bb7-f82b228893e2\") " pod="openstack/glance-default-external-api-0" Dec 03 09:14:05 crc kubenswrapper[4576]: I1203 09:14:05.364273 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ebb315c8-6124-44ed-8bb7-f82b228893e2-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"ebb315c8-6124-44ed-8bb7-f82b228893e2\") " pod="openstack/glance-default-external-api-0" Dec 03 09:14:05 crc kubenswrapper[4576]: I1203 09:14:05.364405 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ebb315c8-6124-44ed-8bb7-f82b228893e2-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ebb315c8-6124-44ed-8bb7-f82b228893e2\") " pod="openstack/glance-default-external-api-0" Dec 03 09:14:05 crc kubenswrapper[4576]: I1203 09:14:05.364433 4576 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ebb315c8-6124-44ed-8bb7-f82b228893e2-scripts\") pod \"glance-default-external-api-0\" (UID: \"ebb315c8-6124-44ed-8bb7-f82b228893e2\") " pod="openstack/glance-default-external-api-0" Dec 03 09:14:05 crc kubenswrapper[4576]: I1203 09:14:05.465252 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ebb315c8-6124-44ed-8bb7-f82b228893e2-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ebb315c8-6124-44ed-8bb7-f82b228893e2\") " pod="openstack/glance-default-external-api-0" Dec 03 09:14:05 crc kubenswrapper[4576]: I1203 09:14:05.465500 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ebb315c8-6124-44ed-8bb7-f82b228893e2-scripts\") pod \"glance-default-external-api-0\" (UID: \"ebb315c8-6124-44ed-8bb7-f82b228893e2\") " pod="openstack/glance-default-external-api-0" Dec 03 09:14:05 crc kubenswrapper[4576]: I1203 09:14:05.465678 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ebb315c8-6124-44ed-8bb7-f82b228893e2-config-data\") pod \"glance-default-external-api-0\" (UID: \"ebb315c8-6124-44ed-8bb7-f82b228893e2\") " pod="openstack/glance-default-external-api-0" Dec 03 09:14:05 crc kubenswrapper[4576]: I1203 09:14:05.465845 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ebb315c8-6124-44ed-8bb7-f82b228893e2-logs\") pod \"glance-default-external-api-0\" (UID: \"ebb315c8-6124-44ed-8bb7-f82b228893e2\") " pod="openstack/glance-default-external-api-0" Dec 03 09:14:05 crc kubenswrapper[4576]: I1203 09:14:05.465947 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"ebb315c8-6124-44ed-8bb7-f82b228893e2\") " pod="openstack/glance-default-external-api-0" Dec 03 09:14:05 crc kubenswrapper[4576]: I1203 09:14:05.466071 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebb315c8-6124-44ed-8bb7-f82b228893e2-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ebb315c8-6124-44ed-8bb7-f82b228893e2\") " pod="openstack/glance-default-external-api-0" Dec 03 09:14:05 crc kubenswrapper[4576]: I1203 09:14:05.466188 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mh2vn\" (UniqueName: \"kubernetes.io/projected/ebb315c8-6124-44ed-8bb7-f82b228893e2-kube-api-access-mh2vn\") pod \"glance-default-external-api-0\" (UID: \"ebb315c8-6124-44ed-8bb7-f82b228893e2\") " pod="openstack/glance-default-external-api-0" Dec 03 09:14:05 crc kubenswrapper[4576]: I1203 09:14:05.466283 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ebb315c8-6124-44ed-8bb7-f82b228893e2-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"ebb315c8-6124-44ed-8bb7-f82b228893e2\") " pod="openstack/glance-default-external-api-0" Dec 03 09:14:05 crc kubenswrapper[4576]: I1203 09:14:05.465872 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/ebb315c8-6124-44ed-8bb7-f82b228893e2-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ebb315c8-6124-44ed-8bb7-f82b228893e2\") " pod="openstack/glance-default-external-api-0" Dec 03 09:14:05 crc kubenswrapper[4576]: I1203 09:14:05.469063 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ebb315c8-6124-44ed-8bb7-f82b228893e2-logs\") pod \"glance-default-external-api-0\" (UID: \"ebb315c8-6124-44ed-8bb7-f82b228893e2\") " pod="openstack/glance-default-external-api-0" Dec 03 09:14:05 crc kubenswrapper[4576]: I1203 09:14:05.469662 4576 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"ebb315c8-6124-44ed-8bb7-f82b228893e2\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-external-api-0" Dec 03 09:14:05 crc kubenswrapper[4576]: I1203 09:14:05.475989 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ebb315c8-6124-44ed-8bb7-f82b228893e2-scripts\") pod \"glance-default-external-api-0\" (UID: \"ebb315c8-6124-44ed-8bb7-f82b228893e2\") " pod="openstack/glance-default-external-api-0" Dec 03 09:14:05 crc kubenswrapper[4576]: I1203 09:14:05.476686 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ebb315c8-6124-44ed-8bb7-f82b228893e2-config-data\") pod \"glance-default-external-api-0\" (UID: \"ebb315c8-6124-44ed-8bb7-f82b228893e2\") " pod="openstack/glance-default-external-api-0" Dec 03 09:14:05 crc kubenswrapper[4576]: I1203 09:14:05.477786 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ebb315c8-6124-44ed-8bb7-f82b228893e2-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"ebb315c8-6124-44ed-8bb7-f82b228893e2\") " pod="openstack/glance-default-external-api-0" Dec 03 09:14:05 crc kubenswrapper[4576]: I1203 09:14:05.483394 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebb315c8-6124-44ed-8bb7-f82b228893e2-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ebb315c8-6124-44ed-8bb7-f82b228893e2\") " pod="openstack/glance-default-external-api-0" Dec 03 09:14:05 crc kubenswrapper[4576]: I1203 09:14:05.487271 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mh2vn\" (UniqueName: \"kubernetes.io/projected/ebb315c8-6124-44ed-8bb7-f82b228893e2-kube-api-access-mh2vn\") pod \"glance-default-external-api-0\" (UID: \"ebb315c8-6124-44ed-8bb7-f82b228893e2\") " pod="openstack/glance-default-external-api-0" Dec 03 09:14:05 crc kubenswrapper[4576]: I1203 09:14:05.522123 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"ebb315c8-6124-44ed-8bb7-f82b228893e2\") " pod="openstack/glance-default-external-api-0" Dec 03 09:14:05 crc kubenswrapper[4576]: I1203 09:14:05.599006 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"42800208-9362-4971-990f-5ed4cac9f500","Type":"ContainerStarted","Data":"5317e058ed7a4e879ea8cca07f65752e47da5ef030de5c14b0ee5e363ff9e58a"} Dec 03 
09:14:05 crc kubenswrapper[4576]: I1203 09:14:05.600629 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-8v6tp" event={"ID":"4fad4d6c-1741-4e92-b5c1-26e939b500df","Type":"ContainerStarted","Data":"f5c68908e58da66a1b029340ce7b749a0f10d6d23684360fa34a8dd0167ede3d"} Dec 03 09:14:05 crc kubenswrapper[4576]: I1203 09:14:05.610305 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 09:14:05 crc kubenswrapper[4576]: I1203 09:14:05.694199 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8e3d72d9-073c-46b3-826c-36b249b45fbf" path="/var/lib/kubelet/pods/8e3d72d9-073c-46b3-826c-36b249b45fbf/volumes" Dec 03 09:14:06 crc kubenswrapper[4576]: I1203 09:14:06.082780 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 09:14:06 crc kubenswrapper[4576]: I1203 09:14:06.623875 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ebb315c8-6124-44ed-8bb7-f82b228893e2","Type":"ContainerStarted","Data":"3dbd225d27dc90c9cc12927aa39619d528a75ddb34a6f792360bb5da4ca0a92f"} Dec 03 09:14:07 crc kubenswrapper[4576]: I1203 09:14:07.647560 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:14:07 crc kubenswrapper[4576]: I1203 09:14:07.658494 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"42800208-9362-4971-990f-5ed4cac9f500","Type":"ContainerStarted","Data":"4c03676d5fd77a60ae3672890cc54abd917383536bab80dc5f11fe6715ccda2c"} Dec 03 09:14:07 crc kubenswrapper[4576]: I1203 09:14:07.659296 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 09:14:07 crc kubenswrapper[4576]: I1203 09:14:07.665006 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ebb315c8-6124-44ed-8bb7-f82b228893e2","Type":"ContainerStarted","Data":"28eebba29a17f0e801f734b84c5f2eecb2d3248e769eb2a2549dbc4e6dc9aca1"} Dec 03 09:14:07 crc kubenswrapper[4576]: I1203 09:14:07.686496 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.02258513 podStartE2EDuration="6.686468234s" podCreationTimestamp="2025-12-03 09:14:01 +0000 UTC" firstStartedPulling="2025-12-03 09:14:02.604166368 +0000 UTC m=+2049.990143352" lastFinishedPulling="2025-12-03 09:14:06.268049472 +0000 UTC m=+2053.654026456" observedRunningTime="2025-12-03 09:14:07.680692326 +0000 UTC m=+2055.066669310" watchObservedRunningTime="2025-12-03 09:14:07.686468234 +0000 UTC m=+2055.072445218" Dec 03 09:14:08 crc kubenswrapper[4576]: I1203 09:14:08.214629 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="06a5ab71-58c3-4345-b8ac-00d09a1205d6" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.151:9292/healthcheck\": read tcp 10.217.0.2:48386->10.217.0.151:9292: read: connection reset by peer" Dec 03 09:14:08 crc kubenswrapper[4576]: I1203 09:14:08.214717 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="06a5ab71-58c3-4345-b8ac-00d09a1205d6" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.151:9292/healthcheck\": read tcp 10.217.0.2:48380->10.217.0.151:9292: read: connection reset by peer" 
Dec 03 09:14:08 crc kubenswrapper[4576]: I1203 09:14:08.677715 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ebb315c8-6124-44ed-8bb7-f82b228893e2","Type":"ContainerStarted","Data":"31bfbe2076e9b88d6f8b204f47d898c789925a10a1df261fe44d73a7cd5680b0"} Dec 03 09:14:08 crc kubenswrapper[4576]: I1203 09:14:08.683484 4576 generic.go:334] "Generic (PLEG): container finished" podID="06a5ab71-58c3-4345-b8ac-00d09a1205d6" containerID="ec6fb260ace858ff3252caf06df438bd629d9b661dc002528047350d813cb3f2" exitCode=0 Dec 03 09:14:08 crc kubenswrapper[4576]: I1203 09:14:08.683702 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="42800208-9362-4971-990f-5ed4cac9f500" containerName="ceilometer-central-agent" containerID="cri-o://5ee8c76da9e6b60df569d086c9165e01e84406a90478aee55adfc9ba76ff70ba" gracePeriod=30 Dec 03 09:14:08 crc kubenswrapper[4576]: I1203 09:14:08.683770 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"06a5ab71-58c3-4345-b8ac-00d09a1205d6","Type":"ContainerDied","Data":"ec6fb260ace858ff3252caf06df438bd629d9b661dc002528047350d813cb3f2"} Dec 03 09:14:08 crc kubenswrapper[4576]: I1203 09:14:08.683803 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"06a5ab71-58c3-4345-b8ac-00d09a1205d6","Type":"ContainerDied","Data":"00ce442ce26529ab960c7feb1c8b9e595718e6f3b9d9f9e4d6aca2db3a56bf56"} Dec 03 09:14:08 crc kubenswrapper[4576]: I1203 09:14:08.683814 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="00ce442ce26529ab960c7feb1c8b9e595718e6f3b9d9f9e4d6aca2db3a56bf56" Dec 03 09:14:08 crc kubenswrapper[4576]: I1203 09:14:08.683880 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="42800208-9362-4971-990f-5ed4cac9f500" containerName="proxy-httpd" containerID="cri-o://4c03676d5fd77a60ae3672890cc54abd917383536bab80dc5f11fe6715ccda2c" gracePeriod=30 Dec 03 09:14:08 crc kubenswrapper[4576]: I1203 09:14:08.683916 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="42800208-9362-4971-990f-5ed4cac9f500" containerName="sg-core" containerID="cri-o://5317e058ed7a4e879ea8cca07f65752e47da5ef030de5c14b0ee5e363ff9e58a" gracePeriod=30 Dec 03 09:14:08 crc kubenswrapper[4576]: I1203 09:14:08.683946 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="42800208-9362-4971-990f-5ed4cac9f500" containerName="ceilometer-notification-agent" containerID="cri-o://2664c54938e0708189b78da9075c42218b209e9db6d3f975c8df2b41df3dd007" gracePeriod=30 Dec 03 09:14:08 crc kubenswrapper[4576]: I1203 09:14:08.709925 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.709904306 podStartE2EDuration="3.709904306s" podCreationTimestamp="2025-12-03 09:14:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:14:08.702569536 +0000 UTC m=+2056.088546520" watchObservedRunningTime="2025-12-03 09:14:08.709904306 +0000 UTC m=+2056.095881290" Dec 03 09:14:08 crc kubenswrapper[4576]: I1203 09:14:08.727471 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 09:14:08 crc kubenswrapper[4576]: I1203 09:14:08.853176 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5cbqw\" (UniqueName: \"kubernetes.io/projected/06a5ab71-58c3-4345-b8ac-00d09a1205d6-kube-api-access-5cbqw\") pod \"06a5ab71-58c3-4345-b8ac-00d09a1205d6\" (UID: \"06a5ab71-58c3-4345-b8ac-00d09a1205d6\") " Dec 03 09:14:08 crc kubenswrapper[4576]: I1203 09:14:08.853264 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/06a5ab71-58c3-4345-b8ac-00d09a1205d6-httpd-run\") pod \"06a5ab71-58c3-4345-b8ac-00d09a1205d6\" (UID: \"06a5ab71-58c3-4345-b8ac-00d09a1205d6\") " Dec 03 09:14:08 crc kubenswrapper[4576]: I1203 09:14:08.853283 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"06a5ab71-58c3-4345-b8ac-00d09a1205d6\" (UID: \"06a5ab71-58c3-4345-b8ac-00d09a1205d6\") " Dec 03 09:14:08 crc kubenswrapper[4576]: I1203 09:14:08.853312 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06a5ab71-58c3-4345-b8ac-00d09a1205d6-config-data\") pod \"06a5ab71-58c3-4345-b8ac-00d09a1205d6\" (UID: \"06a5ab71-58c3-4345-b8ac-00d09a1205d6\") " Dec 03 09:14:08 crc kubenswrapper[4576]: I1203 09:14:08.853370 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/06a5ab71-58c3-4345-b8ac-00d09a1205d6-internal-tls-certs\") pod \"06a5ab71-58c3-4345-b8ac-00d09a1205d6\" (UID: \"06a5ab71-58c3-4345-b8ac-00d09a1205d6\") " Dec 03 09:14:08 crc kubenswrapper[4576]: I1203 09:14:08.853392 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/06a5ab71-58c3-4345-b8ac-00d09a1205d6-logs\") pod \"06a5ab71-58c3-4345-b8ac-00d09a1205d6\" (UID: \"06a5ab71-58c3-4345-b8ac-00d09a1205d6\") " Dec 03 09:14:08 crc kubenswrapper[4576]: I1203 09:14:08.853434 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06a5ab71-58c3-4345-b8ac-00d09a1205d6-scripts\") pod \"06a5ab71-58c3-4345-b8ac-00d09a1205d6\" (UID: \"06a5ab71-58c3-4345-b8ac-00d09a1205d6\") " Dec 03 09:14:08 crc kubenswrapper[4576]: I1203 09:14:08.853454 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06a5ab71-58c3-4345-b8ac-00d09a1205d6-combined-ca-bundle\") pod \"06a5ab71-58c3-4345-b8ac-00d09a1205d6\" (UID: \"06a5ab71-58c3-4345-b8ac-00d09a1205d6\") " Dec 03 09:14:08 crc kubenswrapper[4576]: I1203 09:14:08.863123 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06a5ab71-58c3-4345-b8ac-00d09a1205d6-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "06a5ab71-58c3-4345-b8ac-00d09a1205d6" (UID: "06a5ab71-58c3-4345-b8ac-00d09a1205d6"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:14:08 crc kubenswrapper[4576]: I1203 09:14:08.864261 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06a5ab71-58c3-4345-b8ac-00d09a1205d6-logs" (OuterVolumeSpecName: "logs") pod "06a5ab71-58c3-4345-b8ac-00d09a1205d6" (UID: "06a5ab71-58c3-4345-b8ac-00d09a1205d6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:14:08 crc kubenswrapper[4576]: I1203 09:14:08.896904 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06a5ab71-58c3-4345-b8ac-00d09a1205d6-kube-api-access-5cbqw" (OuterVolumeSpecName: "kube-api-access-5cbqw") pod "06a5ab71-58c3-4345-b8ac-00d09a1205d6" (UID: "06a5ab71-58c3-4345-b8ac-00d09a1205d6"). InnerVolumeSpecName "kube-api-access-5cbqw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:14:08 crc kubenswrapper[4576]: I1203 09:14:08.909197 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5cbqw\" (UniqueName: \"kubernetes.io/projected/06a5ab71-58c3-4345-b8ac-00d09a1205d6-kube-api-access-5cbqw\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:08 crc kubenswrapper[4576]: I1203 09:14:08.909226 4576 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/06a5ab71-58c3-4345-b8ac-00d09a1205d6-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:08 crc kubenswrapper[4576]: I1203 09:14:08.909237 4576 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/06a5ab71-58c3-4345-b8ac-00d09a1205d6-logs\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:08 crc kubenswrapper[4576]: I1203 09:14:08.952960 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "glance") pod "06a5ab71-58c3-4345-b8ac-00d09a1205d6" (UID: "06a5ab71-58c3-4345-b8ac-00d09a1205d6"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 03 09:14:08 crc kubenswrapper[4576]: I1203 09:14:08.958251 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06a5ab71-58c3-4345-b8ac-00d09a1205d6-scripts" (OuterVolumeSpecName: "scripts") pod "06a5ab71-58c3-4345-b8ac-00d09a1205d6" (UID: "06a5ab71-58c3-4345-b8ac-00d09a1205d6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.022657 4576 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.022704 4576 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06a5ab71-58c3-4345-b8ac-00d09a1205d6-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.099650 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06a5ab71-58c3-4345-b8ac-00d09a1205d6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "06a5ab71-58c3-4345-b8ac-00d09a1205d6" (UID: "06a5ab71-58c3-4345-b8ac-00d09a1205d6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.099915 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06a5ab71-58c3-4345-b8ac-00d09a1205d6-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "06a5ab71-58c3-4345-b8ac-00d09a1205d6" (UID: "06a5ab71-58c3-4345-b8ac-00d09a1205d6"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.101455 4576 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.124728 4576 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/06a5ab71-58c3-4345-b8ac-00d09a1205d6-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.124949 4576 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06a5ab71-58c3-4345-b8ac-00d09a1205d6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.125048 4576 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.141740 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06a5ab71-58c3-4345-b8ac-00d09a1205d6-config-data" (OuterVolumeSpecName: "config-data") pod "06a5ab71-58c3-4345-b8ac-00d09a1205d6" (UID: "06a5ab71-58c3-4345-b8ac-00d09a1205d6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.229776 4576 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06a5ab71-58c3-4345-b8ac-00d09a1205d6-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.681000 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.681449 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.704659 4576 generic.go:334] "Generic (PLEG): container finished" podID="42800208-9362-4971-990f-5ed4cac9f500" containerID="4c03676d5fd77a60ae3672890cc54abd917383536bab80dc5f11fe6715ccda2c" exitCode=0 Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.704696 4576 generic.go:334] "Generic (PLEG): container finished" podID="42800208-9362-4971-990f-5ed4cac9f500" containerID="5317e058ed7a4e879ea8cca07f65752e47da5ef030de5c14b0ee5e363ff9e58a" exitCode=2 Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.704703 4576 generic.go:334] "Generic (PLEG): container finished" podID="42800208-9362-4971-990f-5ed4cac9f500" containerID="2664c54938e0708189b78da9075c42218b209e9db6d3f975c8df2b41df3dd007" exitCode=0 Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.704732 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"42800208-9362-4971-990f-5ed4cac9f500","Type":"ContainerDied","Data":"4c03676d5fd77a60ae3672890cc54abd917383536bab80dc5f11fe6715ccda2c"} Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.704806 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"42800208-9362-4971-990f-5ed4cac9f500","Type":"ContainerDied","Data":"5317e058ed7a4e879ea8cca07f65752e47da5ef030de5c14b0ee5e363ff9e58a"} Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.704820 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"42800208-9362-4971-990f-5ed4cac9f500","Type":"ContainerDied","Data":"2664c54938e0708189b78da9075c42218b209e9db6d3f975c8df2b41df3dd007"} Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.704846 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.732444 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.745887 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.753360 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 09:14:09 crc kubenswrapper[4576]: E1203 09:14:09.753810 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06a5ab71-58c3-4345-b8ac-00d09a1205d6" containerName="glance-httpd" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.753827 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="06a5ab71-58c3-4345-b8ac-00d09a1205d6" containerName="glance-httpd" Dec 03 09:14:09 crc kubenswrapper[4576]: E1203 09:14:09.753846 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06a5ab71-58c3-4345-b8ac-00d09a1205d6" containerName="glance-log" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.753853 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="06a5ab71-58c3-4345-b8ac-00d09a1205d6" containerName="glance-log" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.754038 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="06a5ab71-58c3-4345-b8ac-00d09a1205d6" containerName="glance-httpd" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.754060 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="06a5ab71-58c3-4345-b8ac-00d09a1205d6" containerName="glance-log" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.755028 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.760067 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.760078 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.766273 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.840159 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad4e4d0b-1390-463b-9337-3b3d8f6ca758-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"ad4e4d0b-1390-463b-9337-3b3d8f6ca758\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.840201 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ad4e4d0b-1390-463b-9337-3b3d8f6ca758-logs\") pod \"glance-default-internal-api-0\" (UID: \"ad4e4d0b-1390-463b-9337-3b3d8f6ca758\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.840230 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ad4e4d0b-1390-463b-9337-3b3d8f6ca758-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ad4e4d0b-1390-463b-9337-3b3d8f6ca758\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.840371 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad4e4d0b-1390-463b-9337-3b3d8f6ca758-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ad4e4d0b-1390-463b-9337-3b3d8f6ca758\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.840453 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad4e4d0b-1390-463b-9337-3b3d8f6ca758-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ad4e4d0b-1390-463b-9337-3b3d8f6ca758\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.840507 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"ad4e4d0b-1390-463b-9337-3b3d8f6ca758\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.840682 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad4e4d0b-1390-463b-9337-3b3d8f6ca758-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ad4e4d0b-1390-463b-9337-3b3d8f6ca758\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.840893 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-4bxwm\" (UniqueName: \"kubernetes.io/projected/ad4e4d0b-1390-463b-9337-3b3d8f6ca758-kube-api-access-4bxwm\") pod \"glance-default-internal-api-0\" (UID: \"ad4e4d0b-1390-463b-9337-3b3d8f6ca758\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.944472 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4bxwm\" (UniqueName: \"kubernetes.io/projected/ad4e4d0b-1390-463b-9337-3b3d8f6ca758-kube-api-access-4bxwm\") pod \"glance-default-internal-api-0\" (UID: \"ad4e4d0b-1390-463b-9337-3b3d8f6ca758\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.944566 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad4e4d0b-1390-463b-9337-3b3d8f6ca758-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"ad4e4d0b-1390-463b-9337-3b3d8f6ca758\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.944593 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ad4e4d0b-1390-463b-9337-3b3d8f6ca758-logs\") pod \"glance-default-internal-api-0\" (UID: \"ad4e4d0b-1390-463b-9337-3b3d8f6ca758\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.944619 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ad4e4d0b-1390-463b-9337-3b3d8f6ca758-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ad4e4d0b-1390-463b-9337-3b3d8f6ca758\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.944654 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad4e4d0b-1390-463b-9337-3b3d8f6ca758-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ad4e4d0b-1390-463b-9337-3b3d8f6ca758\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.944682 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad4e4d0b-1390-463b-9337-3b3d8f6ca758-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ad4e4d0b-1390-463b-9337-3b3d8f6ca758\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.944706 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"ad4e4d0b-1390-463b-9337-3b3d8f6ca758\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.944754 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad4e4d0b-1390-463b-9337-3b3d8f6ca758-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ad4e4d0b-1390-463b-9337-3b3d8f6ca758\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.945043 4576 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"ad4e4d0b-1390-463b-9337-3b3d8f6ca758\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-internal-api-0" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.945125 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ad4e4d0b-1390-463b-9337-3b3d8f6ca758-logs\") pod \"glance-default-internal-api-0\" (UID: \"ad4e4d0b-1390-463b-9337-3b3d8f6ca758\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.945208 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ad4e4d0b-1390-463b-9337-3b3d8f6ca758-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ad4e4d0b-1390-463b-9337-3b3d8f6ca758\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.951422 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad4e4d0b-1390-463b-9337-3b3d8f6ca758-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ad4e4d0b-1390-463b-9337-3b3d8f6ca758\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.954622 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad4e4d0b-1390-463b-9337-3b3d8f6ca758-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ad4e4d0b-1390-463b-9337-3b3d8f6ca758\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.954992 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad4e4d0b-1390-463b-9337-3b3d8f6ca758-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"ad4e4d0b-1390-463b-9337-3b3d8f6ca758\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.957709 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad4e4d0b-1390-463b-9337-3b3d8f6ca758-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ad4e4d0b-1390-463b-9337-3b3d8f6ca758\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.978311 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4bxwm\" (UniqueName: \"kubernetes.io/projected/ad4e4d0b-1390-463b-9337-3b3d8f6ca758-kube-api-access-4bxwm\") pod \"glance-default-internal-api-0\" (UID: \"ad4e4d0b-1390-463b-9337-3b3d8f6ca758\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:14:09 crc kubenswrapper[4576]: I1203 09:14:09.993653 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"ad4e4d0b-1390-463b-9337-3b3d8f6ca758\") " pod="openstack/glance-default-internal-api-0" Dec 03 09:14:10 crc kubenswrapper[4576]: I1203 09:14:10.080024 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 09:14:10 crc kubenswrapper[4576]: I1203 09:14:10.723928 4576 generic.go:334] "Generic (PLEG): container finished" podID="42800208-9362-4971-990f-5ed4cac9f500" containerID="5ee8c76da9e6b60df569d086c9165e01e84406a90478aee55adfc9ba76ff70ba" exitCode=0 Dec 03 09:14:10 crc kubenswrapper[4576]: I1203 09:14:10.724050 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"42800208-9362-4971-990f-5ed4cac9f500","Type":"ContainerDied","Data":"5ee8c76da9e6b60df569d086c9165e01e84406a90478aee55adfc9ba76ff70ba"} Dec 03 09:14:10 crc kubenswrapper[4576]: I1203 09:14:10.830660 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 09:14:10 crc kubenswrapper[4576]: I1203 09:14:10.839555 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 09:14:10 crc kubenswrapper[4576]: I1203 09:14:10.867069 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/42800208-9362-4971-990f-5ed4cac9f500-run-httpd\") pod \"42800208-9362-4971-990f-5ed4cac9f500\" (UID: \"42800208-9362-4971-990f-5ed4cac9f500\") " Dec 03 09:14:10 crc kubenswrapper[4576]: I1203 09:14:10.867118 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/42800208-9362-4971-990f-5ed4cac9f500-log-httpd\") pod \"42800208-9362-4971-990f-5ed4cac9f500\" (UID: \"42800208-9362-4971-990f-5ed4cac9f500\") " Dec 03 09:14:10 crc kubenswrapper[4576]: I1203 09:14:10.867167 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/42800208-9362-4971-990f-5ed4cac9f500-sg-core-conf-yaml\") pod \"42800208-9362-4971-990f-5ed4cac9f500\" (UID: \"42800208-9362-4971-990f-5ed4cac9f500\") " Dec 03 09:14:10 crc kubenswrapper[4576]: I1203 09:14:10.867251 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42800208-9362-4971-990f-5ed4cac9f500-combined-ca-bundle\") pod \"42800208-9362-4971-990f-5ed4cac9f500\" (UID: \"42800208-9362-4971-990f-5ed4cac9f500\") " Dec 03 09:14:10 crc kubenswrapper[4576]: I1203 09:14:10.867363 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l58qj\" (UniqueName: \"kubernetes.io/projected/42800208-9362-4971-990f-5ed4cac9f500-kube-api-access-l58qj\") pod \"42800208-9362-4971-990f-5ed4cac9f500\" (UID: \"42800208-9362-4971-990f-5ed4cac9f500\") " Dec 03 09:14:10 crc kubenswrapper[4576]: I1203 09:14:10.867381 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/42800208-9362-4971-990f-5ed4cac9f500-scripts\") pod \"42800208-9362-4971-990f-5ed4cac9f500\" (UID: \"42800208-9362-4971-990f-5ed4cac9f500\") " Dec 03 09:14:10 crc kubenswrapper[4576]: I1203 09:14:10.867429 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/42800208-9362-4971-990f-5ed4cac9f500-config-data\") pod \"42800208-9362-4971-990f-5ed4cac9f500\" (UID: \"42800208-9362-4971-990f-5ed4cac9f500\") " Dec 03 09:14:10 crc kubenswrapper[4576]: I1203 09:14:10.869433 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/empty-dir/42800208-9362-4971-990f-5ed4cac9f500-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "42800208-9362-4971-990f-5ed4cac9f500" (UID: "42800208-9362-4971-990f-5ed4cac9f500"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:14:10 crc kubenswrapper[4576]: I1203 09:14:10.870031 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/42800208-9362-4971-990f-5ed4cac9f500-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "42800208-9362-4971-990f-5ed4cac9f500" (UID: "42800208-9362-4971-990f-5ed4cac9f500"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:14:10 crc kubenswrapper[4576]: I1203 09:14:10.878013 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/42800208-9362-4971-990f-5ed4cac9f500-kube-api-access-l58qj" (OuterVolumeSpecName: "kube-api-access-l58qj") pod "42800208-9362-4971-990f-5ed4cac9f500" (UID: "42800208-9362-4971-990f-5ed4cac9f500"). InnerVolumeSpecName "kube-api-access-l58qj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:14:10 crc kubenswrapper[4576]: I1203 09:14:10.880738 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42800208-9362-4971-990f-5ed4cac9f500-scripts" (OuterVolumeSpecName: "scripts") pod "42800208-9362-4971-990f-5ed4cac9f500" (UID: "42800208-9362-4971-990f-5ed4cac9f500"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:14:10 crc kubenswrapper[4576]: I1203 09:14:10.969042 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l58qj\" (UniqueName: \"kubernetes.io/projected/42800208-9362-4971-990f-5ed4cac9f500-kube-api-access-l58qj\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:10 crc kubenswrapper[4576]: I1203 09:14:10.969076 4576 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/42800208-9362-4971-990f-5ed4cac9f500-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:10 crc kubenswrapper[4576]: I1203 09:14:10.969086 4576 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/42800208-9362-4971-990f-5ed4cac9f500-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:10 crc kubenswrapper[4576]: I1203 09:14:10.969096 4576 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/42800208-9362-4971-990f-5ed4cac9f500-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:10 crc kubenswrapper[4576]: I1203 09:14:10.979010 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42800208-9362-4971-990f-5ed4cac9f500-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "42800208-9362-4971-990f-5ed4cac9f500" (UID: "42800208-9362-4971-990f-5ed4cac9f500"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:14:10 crc kubenswrapper[4576]: I1203 09:14:10.986675 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42800208-9362-4971-990f-5ed4cac9f500-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "42800208-9362-4971-990f-5ed4cac9f500" (UID: "42800208-9362-4971-990f-5ed4cac9f500"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:14:11 crc kubenswrapper[4576]: I1203 09:14:11.073299 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42800208-9362-4971-990f-5ed4cac9f500-config-data" (OuterVolumeSpecName: "config-data") pod "42800208-9362-4971-990f-5ed4cac9f500" (UID: "42800208-9362-4971-990f-5ed4cac9f500"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:14:11 crc kubenswrapper[4576]: I1203 09:14:11.075072 4576 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/42800208-9362-4971-990f-5ed4cac9f500-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:11 crc kubenswrapper[4576]: I1203 09:14:11.075100 4576 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42800208-9362-4971-990f-5ed4cac9f500-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:11 crc kubenswrapper[4576]: I1203 09:14:11.075111 4576 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/42800208-9362-4971-990f-5ed4cac9f500-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:11 crc kubenswrapper[4576]: I1203 09:14:11.755726 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06a5ab71-58c3-4345-b8ac-00d09a1205d6" path="/var/lib/kubelet/pods/06a5ab71-58c3-4345-b8ac-00d09a1205d6/volumes" Dec 03 09:14:11 crc kubenswrapper[4576]: I1203 09:14:11.776108 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ad4e4d0b-1390-463b-9337-3b3d8f6ca758","Type":"ContainerStarted","Data":"58860b0799d193a049df5e177627e5b9e17da861afdb73a9ea2881de6b0f65cb"} Dec 03 09:14:11 crc kubenswrapper[4576]: I1203 09:14:11.776153 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ad4e4d0b-1390-463b-9337-3b3d8f6ca758","Type":"ContainerStarted","Data":"d7aa2208706f60cfd9ee030a7e2972bbfbf6a8f9f868726cc90fe2ef7ecea5ef"} Dec 03 09:14:11 crc kubenswrapper[4576]: I1203 09:14:11.792118 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 09:14:11 crc kubenswrapper[4576]: I1203 09:14:11.792054 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"42800208-9362-4971-990f-5ed4cac9f500","Type":"ContainerDied","Data":"4ba2b3155123339705eb03a68a2a68fb4cfde524da25a81f5f2905575d45618a"} Dec 03 09:14:11 crc kubenswrapper[4576]: I1203 09:14:11.793712 4576 scope.go:117] "RemoveContainer" containerID="4c03676d5fd77a60ae3672890cc54abd917383536bab80dc5f11fe6715ccda2c" Dec 03 09:14:11 crc kubenswrapper[4576]: I1203 09:14:11.838782 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:14:11 crc kubenswrapper[4576]: I1203 09:14:11.843135 4576 scope.go:117] "RemoveContainer" containerID="5317e058ed7a4e879ea8cca07f65752e47da5ef030de5c14b0ee5e363ff9e58a" Dec 03 09:14:11 crc kubenswrapper[4576]: I1203 09:14:11.864033 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:14:11 crc kubenswrapper[4576]: I1203 09:14:11.872380 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:14:11 crc kubenswrapper[4576]: I1203 09:14:11.872581 4576 scope.go:117] "RemoveContainer" containerID="2664c54938e0708189b78da9075c42218b209e9db6d3f975c8df2b41df3dd007" Dec 03 09:14:11 crc kubenswrapper[4576]: E1203 09:14:11.872929 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42800208-9362-4971-990f-5ed4cac9f500" containerName="ceilometer-central-agent" Dec 03 09:14:11 crc kubenswrapper[4576]: I1203 09:14:11.872948 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="42800208-9362-4971-990f-5ed4cac9f500" containerName="ceilometer-central-agent" Dec 03 09:14:11 crc kubenswrapper[4576]: E1203 09:14:11.872982 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42800208-9362-4971-990f-5ed4cac9f500" containerName="sg-core" Dec 03 09:14:11 crc kubenswrapper[4576]: I1203 09:14:11.872988 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="42800208-9362-4971-990f-5ed4cac9f500" containerName="sg-core" Dec 03 09:14:11 crc kubenswrapper[4576]: E1203 09:14:11.872999 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42800208-9362-4971-990f-5ed4cac9f500" containerName="proxy-httpd" Dec 03 09:14:11 crc kubenswrapper[4576]: I1203 09:14:11.873005 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="42800208-9362-4971-990f-5ed4cac9f500" containerName="proxy-httpd" Dec 03 09:14:11 crc kubenswrapper[4576]: E1203 09:14:11.873024 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42800208-9362-4971-990f-5ed4cac9f500" containerName="ceilometer-notification-agent" Dec 03 09:14:11 crc kubenswrapper[4576]: I1203 09:14:11.873031 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="42800208-9362-4971-990f-5ed4cac9f500" containerName="ceilometer-notification-agent" Dec 03 09:14:11 crc kubenswrapper[4576]: I1203 09:14:11.873200 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="42800208-9362-4971-990f-5ed4cac9f500" containerName="proxy-httpd" Dec 03 09:14:11 crc kubenswrapper[4576]: I1203 09:14:11.873211 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="42800208-9362-4971-990f-5ed4cac9f500" containerName="ceilometer-notification-agent" Dec 03 09:14:11 crc kubenswrapper[4576]: I1203 09:14:11.873220 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="42800208-9362-4971-990f-5ed4cac9f500" containerName="sg-core" Dec 
03 09:14:11 crc kubenswrapper[4576]: I1203 09:14:11.873242 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="42800208-9362-4971-990f-5ed4cac9f500" containerName="ceilometer-central-agent" Dec 03 09:14:11 crc kubenswrapper[4576]: I1203 09:14:11.874987 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 09:14:11 crc kubenswrapper[4576]: I1203 09:14:11.878643 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 09:14:11 crc kubenswrapper[4576]: I1203 09:14:11.878884 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 09:14:11 crc kubenswrapper[4576]: I1203 09:14:11.889252 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:14:11 crc kubenswrapper[4576]: I1203 09:14:11.940762 4576 scope.go:117] "RemoveContainer" containerID="5ee8c76da9e6b60df569d086c9165e01e84406a90478aee55adfc9ba76ff70ba" Dec 03 09:14:12 crc kubenswrapper[4576]: I1203 09:14:12.025521 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/70322375-dd50-4e80-950a-87f4f81c0848-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"70322375-dd50-4e80-950a-87f4f81c0848\") " pod="openstack/ceilometer-0" Dec 03 09:14:12 crc kubenswrapper[4576]: I1203 09:14:12.025687 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/70322375-dd50-4e80-950a-87f4f81c0848-log-httpd\") pod \"ceilometer-0\" (UID: \"70322375-dd50-4e80-950a-87f4f81c0848\") " pod="openstack/ceilometer-0" Dec 03 09:14:12 crc kubenswrapper[4576]: I1203 09:14:12.025801 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70322375-dd50-4e80-950a-87f4f81c0848-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"70322375-dd50-4e80-950a-87f4f81c0848\") " pod="openstack/ceilometer-0" Dec 03 09:14:12 crc kubenswrapper[4576]: I1203 09:14:12.026027 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/70322375-dd50-4e80-950a-87f4f81c0848-run-httpd\") pod \"ceilometer-0\" (UID: \"70322375-dd50-4e80-950a-87f4f81c0848\") " pod="openstack/ceilometer-0" Dec 03 09:14:12 crc kubenswrapper[4576]: I1203 09:14:12.026347 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70322375-dd50-4e80-950a-87f4f81c0848-scripts\") pod \"ceilometer-0\" (UID: \"70322375-dd50-4e80-950a-87f4f81c0848\") " pod="openstack/ceilometer-0" Dec 03 09:14:12 crc kubenswrapper[4576]: I1203 09:14:12.026485 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7qxct\" (UniqueName: \"kubernetes.io/projected/70322375-dd50-4e80-950a-87f4f81c0848-kube-api-access-7qxct\") pod \"ceilometer-0\" (UID: \"70322375-dd50-4e80-950a-87f4f81c0848\") " pod="openstack/ceilometer-0" Dec 03 09:14:12 crc kubenswrapper[4576]: I1203 09:14:12.026645 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70322375-dd50-4e80-950a-87f4f81c0848-config-data\") pod 
\"ceilometer-0\" (UID: \"70322375-dd50-4e80-950a-87f4f81c0848\") " pod="openstack/ceilometer-0" Dec 03 09:14:12 crc kubenswrapper[4576]: I1203 09:14:12.128270 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70322375-dd50-4e80-950a-87f4f81c0848-config-data\") pod \"ceilometer-0\" (UID: \"70322375-dd50-4e80-950a-87f4f81c0848\") " pod="openstack/ceilometer-0" Dec 03 09:14:12 crc kubenswrapper[4576]: I1203 09:14:12.128426 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/70322375-dd50-4e80-950a-87f4f81c0848-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"70322375-dd50-4e80-950a-87f4f81c0848\") " pod="openstack/ceilometer-0" Dec 03 09:14:12 crc kubenswrapper[4576]: I1203 09:14:12.128475 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/70322375-dd50-4e80-950a-87f4f81c0848-log-httpd\") pod \"ceilometer-0\" (UID: \"70322375-dd50-4e80-950a-87f4f81c0848\") " pod="openstack/ceilometer-0" Dec 03 09:14:12 crc kubenswrapper[4576]: I1203 09:14:12.128502 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70322375-dd50-4e80-950a-87f4f81c0848-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"70322375-dd50-4e80-950a-87f4f81c0848\") " pod="openstack/ceilometer-0" Dec 03 09:14:12 crc kubenswrapper[4576]: I1203 09:14:12.128587 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/70322375-dd50-4e80-950a-87f4f81c0848-run-httpd\") pod \"ceilometer-0\" (UID: \"70322375-dd50-4e80-950a-87f4f81c0848\") " pod="openstack/ceilometer-0" Dec 03 09:14:12 crc kubenswrapper[4576]: I1203 09:14:12.128633 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70322375-dd50-4e80-950a-87f4f81c0848-scripts\") pod \"ceilometer-0\" (UID: \"70322375-dd50-4e80-950a-87f4f81c0848\") " pod="openstack/ceilometer-0" Dec 03 09:14:12 crc kubenswrapper[4576]: I1203 09:14:12.128683 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7qxct\" (UniqueName: \"kubernetes.io/projected/70322375-dd50-4e80-950a-87f4f81c0848-kube-api-access-7qxct\") pod \"ceilometer-0\" (UID: \"70322375-dd50-4e80-950a-87f4f81c0848\") " pod="openstack/ceilometer-0" Dec 03 09:14:12 crc kubenswrapper[4576]: I1203 09:14:12.129487 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/70322375-dd50-4e80-950a-87f4f81c0848-log-httpd\") pod \"ceilometer-0\" (UID: \"70322375-dd50-4e80-950a-87f4f81c0848\") " pod="openstack/ceilometer-0" Dec 03 09:14:12 crc kubenswrapper[4576]: I1203 09:14:12.129565 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/70322375-dd50-4e80-950a-87f4f81c0848-run-httpd\") pod \"ceilometer-0\" (UID: \"70322375-dd50-4e80-950a-87f4f81c0848\") " pod="openstack/ceilometer-0" Dec 03 09:14:12 crc kubenswrapper[4576]: I1203 09:14:12.135636 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70322375-dd50-4e80-950a-87f4f81c0848-combined-ca-bundle\") pod \"ceilometer-0\" (UID: 
\"70322375-dd50-4e80-950a-87f4f81c0848\") " pod="openstack/ceilometer-0" Dec 03 09:14:12 crc kubenswrapper[4576]: I1203 09:14:12.136793 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/70322375-dd50-4e80-950a-87f4f81c0848-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"70322375-dd50-4e80-950a-87f4f81c0848\") " pod="openstack/ceilometer-0" Dec 03 09:14:12 crc kubenswrapper[4576]: I1203 09:14:12.149668 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70322375-dd50-4e80-950a-87f4f81c0848-config-data\") pod \"ceilometer-0\" (UID: \"70322375-dd50-4e80-950a-87f4f81c0848\") " pod="openstack/ceilometer-0" Dec 03 09:14:12 crc kubenswrapper[4576]: I1203 09:14:12.153059 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7qxct\" (UniqueName: \"kubernetes.io/projected/70322375-dd50-4e80-950a-87f4f81c0848-kube-api-access-7qxct\") pod \"ceilometer-0\" (UID: \"70322375-dd50-4e80-950a-87f4f81c0848\") " pod="openstack/ceilometer-0" Dec 03 09:14:12 crc kubenswrapper[4576]: I1203 09:14:12.154663 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70322375-dd50-4e80-950a-87f4f81c0848-scripts\") pod \"ceilometer-0\" (UID: \"70322375-dd50-4e80-950a-87f4f81c0848\") " pod="openstack/ceilometer-0" Dec 03 09:14:12 crc kubenswrapper[4576]: I1203 09:14:12.209446 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 09:14:12 crc kubenswrapper[4576]: I1203 09:14:12.712284 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:14:12 crc kubenswrapper[4576]: I1203 09:14:12.811315 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ad4e4d0b-1390-463b-9337-3b3d8f6ca758","Type":"ContainerStarted","Data":"a0f5c77657f5f3f2cb5a68b9ca11483aace0c568f72f872d2a0899e8340d6965"} Dec 03 09:14:12 crc kubenswrapper[4576]: I1203 09:14:12.840432 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.840413446 podStartE2EDuration="3.840413446s" podCreationTimestamp="2025-12-03 09:14:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:14:12.827801802 +0000 UTC m=+2060.213778796" watchObservedRunningTime="2025-12-03 09:14:12.840413446 +0000 UTC m=+2060.226390430" Dec 03 09:14:13 crc kubenswrapper[4576]: I1203 09:14:13.689753 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="42800208-9362-4971-990f-5ed4cac9f500" path="/var/lib/kubelet/pods/42800208-9362-4971-990f-5ed4cac9f500/volumes" Dec 03 09:14:14 crc kubenswrapper[4576]: I1203 09:14:14.418094 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-5d9b9454d4-cbqlk" Dec 03 09:14:14 crc kubenswrapper[4576]: I1203 09:14:14.452291 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-6d649695d8-6rtxn" Dec 03 09:14:15 crc kubenswrapper[4576]: I1203 09:14:15.000830 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:14:15 crc kubenswrapper[4576]: I1203 09:14:15.611509 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 03 09:14:15 crc kubenswrapper[4576]: I1203 09:14:15.611882 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 03 09:14:15 crc kubenswrapper[4576]: I1203 09:14:15.662518 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 03 09:14:15 crc kubenswrapper[4576]: I1203 09:14:15.663030 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 03 09:14:15 crc kubenswrapper[4576]: I1203 09:14:15.845266 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 03 09:14:15 crc kubenswrapper[4576]: I1203 09:14:15.845859 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 03 09:14:16 crc kubenswrapper[4576]: I1203 09:14:16.579064 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-6d649695d8-6rtxn" Dec 03 09:14:16 crc kubenswrapper[4576]: I1203 09:14:16.623598 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-5d9b9454d4-cbqlk" Dec 03 09:14:16 crc kubenswrapper[4576]: I1203 09:14:16.676952 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5d9b9454d4-cbqlk"] Dec 03 09:14:16 crc kubenswrapper[4576]: I1203 09:14:16.860277 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-5d9b9454d4-cbqlk" podUID="83ab6db2-7b9e-4161-a064-56fe67986825" containerName="horizon-log" containerID="cri-o://de540cc70f90a90805d6634416160cae0d2a1d21d13f2dece12fc7df31fde65a" gracePeriod=30 Dec 03 09:14:16 crc kubenswrapper[4576]: I1203 09:14:16.860443 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-5d9b9454d4-cbqlk" podUID="83ab6db2-7b9e-4161-a064-56fe67986825" containerName="horizon" containerID="cri-o://6656ae443ae4c2e89e4664ef221d9536d0cd3bd54b0704ab14a5be9f2234e78e" gracePeriod=30 Dec 03 09:14:17 crc kubenswrapper[4576]: I1203 09:14:17.874375 4576 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 09:14:17 crc kubenswrapper[4576]: I1203 09:14:17.874404 4576 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 09:14:18 crc kubenswrapper[4576]: I1203 09:14:18.776845 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 03 09:14:18 crc kubenswrapper[4576]: I1203 09:14:18.782676 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 03 09:14:20 crc kubenswrapper[4576]: I1203 09:14:20.081185 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 03 09:14:20 crc kubenswrapper[4576]: I1203 09:14:20.081242 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 03 09:14:20 crc kubenswrapper[4576]: I1203 09:14:20.111178 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 03 09:14:20 crc kubenswrapper[4576]: I1203 09:14:20.121940 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openstack/glance-default-internal-api-0" Dec 03 09:14:21 crc kubenswrapper[4576]: I1203 09:14:21.056485 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 03 09:14:21 crc kubenswrapper[4576]: I1203 09:14:21.056550 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 03 09:14:21 crc kubenswrapper[4576]: I1203 09:14:21.745965 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-5d9b9454d4-cbqlk" podUID="83ab6db2-7b9e-4161-a064-56fe67986825" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.144:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.144:8443: connect: connection refused" Dec 03 09:14:22 crc kubenswrapper[4576]: I1203 09:14:22.113331 4576 generic.go:334] "Generic (PLEG): container finished" podID="83ab6db2-7b9e-4161-a064-56fe67986825" containerID="6656ae443ae4c2e89e4664ef221d9536d0cd3bd54b0704ab14a5be9f2234e78e" exitCode=0 Dec 03 09:14:22 crc kubenswrapper[4576]: I1203 09:14:22.114123 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5d9b9454d4-cbqlk" event={"ID":"83ab6db2-7b9e-4161-a064-56fe67986825","Type":"ContainerDied","Data":"6656ae443ae4c2e89e4664ef221d9536d0cd3bd54b0704ab14a5be9f2234e78e"} Dec 03 09:14:22 crc kubenswrapper[4576]: I1203 09:14:22.114154 4576 scope.go:117] "RemoveContainer" containerID="ecc1d0da0e23f836b4057d594436f8f67b6ca64b352fcb8fb4eef6e69fd70084" Dec 03 09:14:23 crc kubenswrapper[4576]: E1203 09:14:23.095993 4576 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-nova-conductor:current-podified" Dec 03 09:14:23 crc kubenswrapper[4576]: E1203 09:14:23.096450 4576 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:nova-cell0-conductor-db-sync,Image:quay.io/podified-antelope-centos9/openstack-nova-conductor:current-podified,Command:[/bin/bash],Args:[-c 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CELL_NAME,Value:cell0,ValueFrom:nil,},EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:false,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/kolla/config_files/config.json,SubPath:nova-conductor-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nnxdv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42436,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-cell0-conductor-db-sync-8v6tp_openstack(4fad4d6c-1741-4e92-b5c1-26e939b500df): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 09:14:23 crc kubenswrapper[4576]: E1203 09:14:23.097952 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell0-conductor-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/nova-cell0-conductor-db-sync-8v6tp" podUID="4fad4d6c-1741-4e92-b5c1-26e939b500df" Dec 03 09:14:23 crc kubenswrapper[4576]: I1203 09:14:23.129166 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"70322375-dd50-4e80-950a-87f4f81c0848","Type":"ContainerStarted","Data":"73f9b9d8cef8b614f99b3f6d9685c399d266f033bb0f2f50c55c91169a0b4090"} Dec 03 09:14:23 crc kubenswrapper[4576]: E1203 09:14:23.132002 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell0-conductor-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-nova-conductor:current-podified\\\"\"" pod="openstack/nova-cell0-conductor-db-sync-8v6tp" podUID="4fad4d6c-1741-4e92-b5c1-26e939b500df" Dec 03 09:14:23 crc kubenswrapper[4576]: I1203 09:14:23.833376 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 03 09:14:23 crc kubenswrapper[4576]: I1203 09:14:23.833866 4576 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 09:14:24 crc kubenswrapper[4576]: I1203 09:14:24.013918 
4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 03 09:14:24 crc kubenswrapper[4576]: I1203 09:14:24.143332 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"70322375-dd50-4e80-950a-87f4f81c0848","Type":"ContainerStarted","Data":"edd45e872961a3a52acc64efc074a834a0cb474423f2003147cc15f5441a12e1"} Dec 03 09:14:25 crc kubenswrapper[4576]: I1203 09:14:25.153998 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"70322375-dd50-4e80-950a-87f4f81c0848","Type":"ContainerStarted","Data":"c7083d0206b38b18a338b1f693c314a45279050a07f390bff6f0bf671f8be544"} Dec 03 09:14:26 crc kubenswrapper[4576]: I1203 09:14:26.180572 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"70322375-dd50-4e80-950a-87f4f81c0848","Type":"ContainerStarted","Data":"09bd742fe23c4f790c940bf5583d2452f9b962404bc54ae67b43c9b591f86940"} Dec 03 09:14:27 crc kubenswrapper[4576]: I1203 09:14:27.191328 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"70322375-dd50-4e80-950a-87f4f81c0848","Type":"ContainerStarted","Data":"753af3d2cd834ff3f1c362143e381eff1bf24457d2b122079a1dd596aca9d1fc"} Dec 03 09:14:27 crc kubenswrapper[4576]: I1203 09:14:27.191451 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="70322375-dd50-4e80-950a-87f4f81c0848" containerName="ceilometer-central-agent" containerID="cri-o://edd45e872961a3a52acc64efc074a834a0cb474423f2003147cc15f5441a12e1" gracePeriod=30 Dec 03 09:14:27 crc kubenswrapper[4576]: I1203 09:14:27.191502 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="70322375-dd50-4e80-950a-87f4f81c0848" containerName="ceilometer-notification-agent" containerID="cri-o://c7083d0206b38b18a338b1f693c314a45279050a07f390bff6f0bf671f8be544" gracePeriod=30 Dec 03 09:14:27 crc kubenswrapper[4576]: I1203 09:14:27.191506 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="70322375-dd50-4e80-950a-87f4f81c0848" containerName="sg-core" containerID="cri-o://09bd742fe23c4f790c940bf5583d2452f9b962404bc54ae67b43c9b591f86940" gracePeriod=30 Dec 03 09:14:27 crc kubenswrapper[4576]: I1203 09:14:27.191625 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="70322375-dd50-4e80-950a-87f4f81c0848" containerName="proxy-httpd" containerID="cri-o://753af3d2cd834ff3f1c362143e381eff1bf24457d2b122079a1dd596aca9d1fc" gracePeriod=30 Dec 03 09:14:27 crc kubenswrapper[4576]: I1203 09:14:27.191772 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 09:14:27 crc kubenswrapper[4576]: I1203 09:14:27.229154 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=12.748358398 podStartE2EDuration="16.229135066s" podCreationTimestamp="2025-12-03 09:14:11 +0000 UTC" firstStartedPulling="2025-12-03 09:14:23.100156917 +0000 UTC m=+2070.486133901" lastFinishedPulling="2025-12-03 09:14:26.580933585 +0000 UTC m=+2073.966910569" observedRunningTime="2025-12-03 09:14:27.218974799 +0000 UTC m=+2074.604951783" watchObservedRunningTime="2025-12-03 09:14:27.229135066 +0000 UTC m=+2074.615112050" Dec 03 09:14:28 crc kubenswrapper[4576]: I1203 09:14:28.204411 
4576 generic.go:334] "Generic (PLEG): container finished" podID="70322375-dd50-4e80-950a-87f4f81c0848" containerID="753af3d2cd834ff3f1c362143e381eff1bf24457d2b122079a1dd596aca9d1fc" exitCode=0 Dec 03 09:14:28 crc kubenswrapper[4576]: I1203 09:14:28.204457 4576 generic.go:334] "Generic (PLEG): container finished" podID="70322375-dd50-4e80-950a-87f4f81c0848" containerID="09bd742fe23c4f790c940bf5583d2452f9b962404bc54ae67b43c9b591f86940" exitCode=2 Dec 03 09:14:28 crc kubenswrapper[4576]: I1203 09:14:28.204470 4576 generic.go:334] "Generic (PLEG): container finished" podID="70322375-dd50-4e80-950a-87f4f81c0848" containerID="c7083d0206b38b18a338b1f693c314a45279050a07f390bff6f0bf671f8be544" exitCode=0 Dec 03 09:14:28 crc kubenswrapper[4576]: I1203 09:14:28.204479 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"70322375-dd50-4e80-950a-87f4f81c0848","Type":"ContainerDied","Data":"753af3d2cd834ff3f1c362143e381eff1bf24457d2b122079a1dd596aca9d1fc"} Dec 03 09:14:28 crc kubenswrapper[4576]: I1203 09:14:28.204561 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"70322375-dd50-4e80-950a-87f4f81c0848","Type":"ContainerDied","Data":"09bd742fe23c4f790c940bf5583d2452f9b962404bc54ae67b43c9b591f86940"} Dec 03 09:14:28 crc kubenswrapper[4576]: I1203 09:14:28.204573 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"70322375-dd50-4e80-950a-87f4f81c0848","Type":"ContainerDied","Data":"c7083d0206b38b18a338b1f693c314a45279050a07f390bff6f0bf671f8be544"} Dec 03 09:14:28 crc kubenswrapper[4576]: I1203 09:14:28.761709 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 09:14:28 crc kubenswrapper[4576]: I1203 09:14:28.878949 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7qxct\" (UniqueName: \"kubernetes.io/projected/70322375-dd50-4e80-950a-87f4f81c0848-kube-api-access-7qxct\") pod \"70322375-dd50-4e80-950a-87f4f81c0848\" (UID: \"70322375-dd50-4e80-950a-87f4f81c0848\") " Dec 03 09:14:28 crc kubenswrapper[4576]: I1203 09:14:28.879336 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70322375-dd50-4e80-950a-87f4f81c0848-combined-ca-bundle\") pod \"70322375-dd50-4e80-950a-87f4f81c0848\" (UID: \"70322375-dd50-4e80-950a-87f4f81c0848\") " Dec 03 09:14:28 crc kubenswrapper[4576]: I1203 09:14:28.879464 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70322375-dd50-4e80-950a-87f4f81c0848-scripts\") pod \"70322375-dd50-4e80-950a-87f4f81c0848\" (UID: \"70322375-dd50-4e80-950a-87f4f81c0848\") " Dec 03 09:14:28 crc kubenswrapper[4576]: I1203 09:14:28.879519 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/70322375-dd50-4e80-950a-87f4f81c0848-sg-core-conf-yaml\") pod \"70322375-dd50-4e80-950a-87f4f81c0848\" (UID: \"70322375-dd50-4e80-950a-87f4f81c0848\") " Dec 03 09:14:28 crc kubenswrapper[4576]: I1203 09:14:28.879572 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/70322375-dd50-4e80-950a-87f4f81c0848-log-httpd\") pod \"70322375-dd50-4e80-950a-87f4f81c0848\" (UID: \"70322375-dd50-4e80-950a-87f4f81c0848\") " Dec 03 09:14:28 crc 
kubenswrapper[4576]: I1203 09:14:28.879663 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/70322375-dd50-4e80-950a-87f4f81c0848-run-httpd\") pod \"70322375-dd50-4e80-950a-87f4f81c0848\" (UID: \"70322375-dd50-4e80-950a-87f4f81c0848\") " Dec 03 09:14:28 crc kubenswrapper[4576]: I1203 09:14:28.879686 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70322375-dd50-4e80-950a-87f4f81c0848-config-data\") pod \"70322375-dd50-4e80-950a-87f4f81c0848\" (UID: \"70322375-dd50-4e80-950a-87f4f81c0848\") " Dec 03 09:14:28 crc kubenswrapper[4576]: I1203 09:14:28.880350 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/70322375-dd50-4e80-950a-87f4f81c0848-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "70322375-dd50-4e80-950a-87f4f81c0848" (UID: "70322375-dd50-4e80-950a-87f4f81c0848"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:14:28 crc kubenswrapper[4576]: I1203 09:14:28.881795 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/70322375-dd50-4e80-950a-87f4f81c0848-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "70322375-dd50-4e80-950a-87f4f81c0848" (UID: "70322375-dd50-4e80-950a-87f4f81c0848"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:14:28 crc kubenswrapper[4576]: I1203 09:14:28.887196 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70322375-dd50-4e80-950a-87f4f81c0848-kube-api-access-7qxct" (OuterVolumeSpecName: "kube-api-access-7qxct") pod "70322375-dd50-4e80-950a-87f4f81c0848" (UID: "70322375-dd50-4e80-950a-87f4f81c0848"). InnerVolumeSpecName "kube-api-access-7qxct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:14:28 crc kubenswrapper[4576]: I1203 09:14:28.904622 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70322375-dd50-4e80-950a-87f4f81c0848-scripts" (OuterVolumeSpecName: "scripts") pod "70322375-dd50-4e80-950a-87f4f81c0848" (UID: "70322375-dd50-4e80-950a-87f4f81c0848"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:14:28 crc kubenswrapper[4576]: I1203 09:14:28.915603 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70322375-dd50-4e80-950a-87f4f81c0848-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "70322375-dd50-4e80-950a-87f4f81c0848" (UID: "70322375-dd50-4e80-950a-87f4f81c0848"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:14:28 crc kubenswrapper[4576]: I1203 09:14:28.965910 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70322375-dd50-4e80-950a-87f4f81c0848-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "70322375-dd50-4e80-950a-87f4f81c0848" (UID: "70322375-dd50-4e80-950a-87f4f81c0848"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:14:28 crc kubenswrapper[4576]: I1203 09:14:28.985076 4576 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70322375-dd50-4e80-950a-87f4f81c0848-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:28 crc kubenswrapper[4576]: I1203 09:14:28.985104 4576 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/70322375-dd50-4e80-950a-87f4f81c0848-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:28 crc kubenswrapper[4576]: I1203 09:14:28.985116 4576 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/70322375-dd50-4e80-950a-87f4f81c0848-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:28 crc kubenswrapper[4576]: I1203 09:14:28.985127 4576 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/70322375-dd50-4e80-950a-87f4f81c0848-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:28 crc kubenswrapper[4576]: I1203 09:14:28.985136 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7qxct\" (UniqueName: \"kubernetes.io/projected/70322375-dd50-4e80-950a-87f4f81c0848-kube-api-access-7qxct\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:28 crc kubenswrapper[4576]: I1203 09:14:28.985144 4576 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70322375-dd50-4e80-950a-87f4f81c0848-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.001650 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70322375-dd50-4e80-950a-87f4f81c0848-config-data" (OuterVolumeSpecName: "config-data") pod "70322375-dd50-4e80-950a-87f4f81c0848" (UID: "70322375-dd50-4e80-950a-87f4f81c0848"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.086280 4576 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70322375-dd50-4e80-950a-87f4f81c0848-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.221809 4576 generic.go:334] "Generic (PLEG): container finished" podID="70322375-dd50-4e80-950a-87f4f81c0848" containerID="edd45e872961a3a52acc64efc074a834a0cb474423f2003147cc15f5441a12e1" exitCode=0 Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.221856 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"70322375-dd50-4e80-950a-87f4f81c0848","Type":"ContainerDied","Data":"edd45e872961a3a52acc64efc074a834a0cb474423f2003147cc15f5441a12e1"} Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.221956 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"70322375-dd50-4e80-950a-87f4f81c0848","Type":"ContainerDied","Data":"73f9b9d8cef8b614f99b3f6d9685c399d266f033bb0f2f50c55c91169a0b4090"} Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.221883 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.222002 4576 scope.go:117] "RemoveContainer" containerID="753af3d2cd834ff3f1c362143e381eff1bf24457d2b122079a1dd596aca9d1fc" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.246187 4576 scope.go:117] "RemoveContainer" containerID="09bd742fe23c4f790c940bf5583d2452f9b962404bc54ae67b43c9b591f86940" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.289517 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.295884 4576 scope.go:117] "RemoveContainer" containerID="c7083d0206b38b18a338b1f693c314a45279050a07f390bff6f0bf671f8be544" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.310761 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.331300 4576 scope.go:117] "RemoveContainer" containerID="edd45e872961a3a52acc64efc074a834a0cb474423f2003147cc15f5441a12e1" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.334853 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:14:29 crc kubenswrapper[4576]: E1203 09:14:29.335405 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70322375-dd50-4e80-950a-87f4f81c0848" containerName="proxy-httpd" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.335431 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="70322375-dd50-4e80-950a-87f4f81c0848" containerName="proxy-httpd" Dec 03 09:14:29 crc kubenswrapper[4576]: E1203 09:14:29.335451 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70322375-dd50-4e80-950a-87f4f81c0848" containerName="ceilometer-central-agent" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.335462 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="70322375-dd50-4e80-950a-87f4f81c0848" containerName="ceilometer-central-agent" Dec 03 09:14:29 crc kubenswrapper[4576]: E1203 09:14:29.335501 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70322375-dd50-4e80-950a-87f4f81c0848" containerName="ceilometer-notification-agent" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.335511 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="70322375-dd50-4e80-950a-87f4f81c0848" containerName="ceilometer-notification-agent" Dec 03 09:14:29 crc kubenswrapper[4576]: E1203 09:14:29.335545 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70322375-dd50-4e80-950a-87f4f81c0848" containerName="sg-core" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.335553 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="70322375-dd50-4e80-950a-87f4f81c0848" containerName="sg-core" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.335814 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="70322375-dd50-4e80-950a-87f4f81c0848" containerName="ceilometer-notification-agent" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.335853 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="70322375-dd50-4e80-950a-87f4f81c0848" containerName="sg-core" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.335874 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="70322375-dd50-4e80-950a-87f4f81c0848" containerName="proxy-httpd" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.335889 4576 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="70322375-dd50-4e80-950a-87f4f81c0848" containerName="ceilometer-central-agent" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.338392 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.342003 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.345642 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.367696 4576 scope.go:117] "RemoveContainer" containerID="753af3d2cd834ff3f1c362143e381eff1bf24457d2b122079a1dd596aca9d1fc" Dec 03 09:14:29 crc kubenswrapper[4576]: E1203 09:14:29.368148 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"753af3d2cd834ff3f1c362143e381eff1bf24457d2b122079a1dd596aca9d1fc\": container with ID starting with 753af3d2cd834ff3f1c362143e381eff1bf24457d2b122079a1dd596aca9d1fc not found: ID does not exist" containerID="753af3d2cd834ff3f1c362143e381eff1bf24457d2b122079a1dd596aca9d1fc" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.368183 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"753af3d2cd834ff3f1c362143e381eff1bf24457d2b122079a1dd596aca9d1fc"} err="failed to get container status \"753af3d2cd834ff3f1c362143e381eff1bf24457d2b122079a1dd596aca9d1fc\": rpc error: code = NotFound desc = could not find container \"753af3d2cd834ff3f1c362143e381eff1bf24457d2b122079a1dd596aca9d1fc\": container with ID starting with 753af3d2cd834ff3f1c362143e381eff1bf24457d2b122079a1dd596aca9d1fc not found: ID does not exist" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.368205 4576 scope.go:117] "RemoveContainer" containerID="09bd742fe23c4f790c940bf5583d2452f9b962404bc54ae67b43c9b591f86940" Dec 03 09:14:29 crc kubenswrapper[4576]: E1203 09:14:29.368391 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"09bd742fe23c4f790c940bf5583d2452f9b962404bc54ae67b43c9b591f86940\": container with ID starting with 09bd742fe23c4f790c940bf5583d2452f9b962404bc54ae67b43c9b591f86940 not found: ID does not exist" containerID="09bd742fe23c4f790c940bf5583d2452f9b962404bc54ae67b43c9b591f86940" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.368413 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"09bd742fe23c4f790c940bf5583d2452f9b962404bc54ae67b43c9b591f86940"} err="failed to get container status \"09bd742fe23c4f790c940bf5583d2452f9b962404bc54ae67b43c9b591f86940\": rpc error: code = NotFound desc = could not find container \"09bd742fe23c4f790c940bf5583d2452f9b962404bc54ae67b43c9b591f86940\": container with ID starting with 09bd742fe23c4f790c940bf5583d2452f9b962404bc54ae67b43c9b591f86940 not found: ID does not exist" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.368427 4576 scope.go:117] "RemoveContainer" containerID="c7083d0206b38b18a338b1f693c314a45279050a07f390bff6f0bf671f8be544" Dec 03 09:14:29 crc kubenswrapper[4576]: E1203 09:14:29.368745 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c7083d0206b38b18a338b1f693c314a45279050a07f390bff6f0bf671f8be544\": container with ID starting with 
c7083d0206b38b18a338b1f693c314a45279050a07f390bff6f0bf671f8be544 not found: ID does not exist" containerID="c7083d0206b38b18a338b1f693c314a45279050a07f390bff6f0bf671f8be544" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.368772 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c7083d0206b38b18a338b1f693c314a45279050a07f390bff6f0bf671f8be544"} err="failed to get container status \"c7083d0206b38b18a338b1f693c314a45279050a07f390bff6f0bf671f8be544\": rpc error: code = NotFound desc = could not find container \"c7083d0206b38b18a338b1f693c314a45279050a07f390bff6f0bf671f8be544\": container with ID starting with c7083d0206b38b18a338b1f693c314a45279050a07f390bff6f0bf671f8be544 not found: ID does not exist" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.368793 4576 scope.go:117] "RemoveContainer" containerID="edd45e872961a3a52acc64efc074a834a0cb474423f2003147cc15f5441a12e1" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.368868 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:14:29 crc kubenswrapper[4576]: E1203 09:14:29.369615 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"edd45e872961a3a52acc64efc074a834a0cb474423f2003147cc15f5441a12e1\": container with ID starting with edd45e872961a3a52acc64efc074a834a0cb474423f2003147cc15f5441a12e1 not found: ID does not exist" containerID="edd45e872961a3a52acc64efc074a834a0cb474423f2003147cc15f5441a12e1" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.369637 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"edd45e872961a3a52acc64efc074a834a0cb474423f2003147cc15f5441a12e1"} err="failed to get container status \"edd45e872961a3a52acc64efc074a834a0cb474423f2003147cc15f5441a12e1\": rpc error: code = NotFound desc = could not find container \"edd45e872961a3a52acc64efc074a834a0cb474423f2003147cc15f5441a12e1\": container with ID starting with edd45e872961a3a52acc64efc074a834a0cb474423f2003147cc15f5441a12e1 not found: ID does not exist" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.497247 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/55b950c6-c4dc-4b77-bf16-eef5815be0d0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"55b950c6-c4dc-4b77-bf16-eef5815be0d0\") " pod="openstack/ceilometer-0" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.497454 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/55b950c6-c4dc-4b77-bf16-eef5815be0d0-log-httpd\") pod \"ceilometer-0\" (UID: \"55b950c6-c4dc-4b77-bf16-eef5815be0d0\") " pod="openstack/ceilometer-0" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.497622 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/55b950c6-c4dc-4b77-bf16-eef5815be0d0-run-httpd\") pod \"ceilometer-0\" (UID: \"55b950c6-c4dc-4b77-bf16-eef5815be0d0\") " pod="openstack/ceilometer-0" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.497746 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55b950c6-c4dc-4b77-bf16-eef5815be0d0-combined-ca-bundle\") pod 
\"ceilometer-0\" (UID: \"55b950c6-c4dc-4b77-bf16-eef5815be0d0\") " pod="openstack/ceilometer-0" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.497785 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55b950c6-c4dc-4b77-bf16-eef5815be0d0-config-data\") pod \"ceilometer-0\" (UID: \"55b950c6-c4dc-4b77-bf16-eef5815be0d0\") " pod="openstack/ceilometer-0" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.497806 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9hpbh\" (UniqueName: \"kubernetes.io/projected/55b950c6-c4dc-4b77-bf16-eef5815be0d0-kube-api-access-9hpbh\") pod \"ceilometer-0\" (UID: \"55b950c6-c4dc-4b77-bf16-eef5815be0d0\") " pod="openstack/ceilometer-0" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.497822 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/55b950c6-c4dc-4b77-bf16-eef5815be0d0-scripts\") pod \"ceilometer-0\" (UID: \"55b950c6-c4dc-4b77-bf16-eef5815be0d0\") " pod="openstack/ceilometer-0" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.599884 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/55b950c6-c4dc-4b77-bf16-eef5815be0d0-log-httpd\") pod \"ceilometer-0\" (UID: \"55b950c6-c4dc-4b77-bf16-eef5815be0d0\") " pod="openstack/ceilometer-0" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.600318 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/55b950c6-c4dc-4b77-bf16-eef5815be0d0-run-httpd\") pod \"ceilometer-0\" (UID: \"55b950c6-c4dc-4b77-bf16-eef5815be0d0\") " pod="openstack/ceilometer-0" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.600470 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55b950c6-c4dc-4b77-bf16-eef5815be0d0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"55b950c6-c4dc-4b77-bf16-eef5815be0d0\") " pod="openstack/ceilometer-0" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.600575 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55b950c6-c4dc-4b77-bf16-eef5815be0d0-config-data\") pod \"ceilometer-0\" (UID: \"55b950c6-c4dc-4b77-bf16-eef5815be0d0\") " pod="openstack/ceilometer-0" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.600611 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9hpbh\" (UniqueName: \"kubernetes.io/projected/55b950c6-c4dc-4b77-bf16-eef5815be0d0-kube-api-access-9hpbh\") pod \"ceilometer-0\" (UID: \"55b950c6-c4dc-4b77-bf16-eef5815be0d0\") " pod="openstack/ceilometer-0" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.600644 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/55b950c6-c4dc-4b77-bf16-eef5815be0d0-scripts\") pod \"ceilometer-0\" (UID: \"55b950c6-c4dc-4b77-bf16-eef5815be0d0\") " pod="openstack/ceilometer-0" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.600735 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/55b950c6-c4dc-4b77-bf16-eef5815be0d0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"55b950c6-c4dc-4b77-bf16-eef5815be0d0\") " pod="openstack/ceilometer-0" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.600873 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/55b950c6-c4dc-4b77-bf16-eef5815be0d0-run-httpd\") pod \"ceilometer-0\" (UID: \"55b950c6-c4dc-4b77-bf16-eef5815be0d0\") " pod="openstack/ceilometer-0" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.601025 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/55b950c6-c4dc-4b77-bf16-eef5815be0d0-log-httpd\") pod \"ceilometer-0\" (UID: \"55b950c6-c4dc-4b77-bf16-eef5815be0d0\") " pod="openstack/ceilometer-0" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.607307 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55b950c6-c4dc-4b77-bf16-eef5815be0d0-config-data\") pod \"ceilometer-0\" (UID: \"55b950c6-c4dc-4b77-bf16-eef5815be0d0\") " pod="openstack/ceilometer-0" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.607592 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/55b950c6-c4dc-4b77-bf16-eef5815be0d0-scripts\") pod \"ceilometer-0\" (UID: \"55b950c6-c4dc-4b77-bf16-eef5815be0d0\") " pod="openstack/ceilometer-0" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.609620 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55b950c6-c4dc-4b77-bf16-eef5815be0d0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"55b950c6-c4dc-4b77-bf16-eef5815be0d0\") " pod="openstack/ceilometer-0" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.609699 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/55b950c6-c4dc-4b77-bf16-eef5815be0d0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"55b950c6-c4dc-4b77-bf16-eef5815be0d0\") " pod="openstack/ceilometer-0" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.628761 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9hpbh\" (UniqueName: \"kubernetes.io/projected/55b950c6-c4dc-4b77-bf16-eef5815be0d0-kube-api-access-9hpbh\") pod \"ceilometer-0\" (UID: \"55b950c6-c4dc-4b77-bf16-eef5815be0d0\") " pod="openstack/ceilometer-0" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.668691 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 09:14:29 crc kubenswrapper[4576]: I1203 09:14:29.702723 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="70322375-dd50-4e80-950a-87f4f81c0848" path="/var/lib/kubelet/pods/70322375-dd50-4e80-950a-87f4f81c0848/volumes" Dec 03 09:14:30 crc kubenswrapper[4576]: I1203 09:14:30.181579 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:14:30 crc kubenswrapper[4576]: I1203 09:14:30.199094 4576 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 09:14:30 crc kubenswrapper[4576]: I1203 09:14:30.231838 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"55b950c6-c4dc-4b77-bf16-eef5815be0d0","Type":"ContainerStarted","Data":"ec6e7be302780c943b57dbafb6577feb1279315130e7311dded2dd89194f8e30"} Dec 03 09:14:31 crc kubenswrapper[4576]: I1203 09:14:31.242573 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"55b950c6-c4dc-4b77-bf16-eef5815be0d0","Type":"ContainerStarted","Data":"e23b9064c9a940f58e5bf095faec33b1f496ae4cf8c714510dd1881be53dd1d5"} Dec 03 09:14:31 crc kubenswrapper[4576]: I1203 09:14:31.745722 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-5d9b9454d4-cbqlk" podUID="83ab6db2-7b9e-4161-a064-56fe67986825" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.144:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.144:8443: connect: connection refused" Dec 03 09:14:32 crc kubenswrapper[4576]: I1203 09:14:32.256797 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"55b950c6-c4dc-4b77-bf16-eef5815be0d0","Type":"ContainerStarted","Data":"15024ed74a3e4a1f716cc4661feb10dd2385a4bdfafa496eccf3e12fa80787b8"} Dec 03 09:14:33 crc kubenswrapper[4576]: I1203 09:14:33.268330 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"55b950c6-c4dc-4b77-bf16-eef5815be0d0","Type":"ContainerStarted","Data":"b196439ebcb65c12ceb3d11f2087fa90d4b9fe8fa552c184587c0f7350450a22"} Dec 03 09:14:34 crc kubenswrapper[4576]: I1203 09:14:34.282691 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"55b950c6-c4dc-4b77-bf16-eef5815be0d0","Type":"ContainerStarted","Data":"8201202b2f4bc6aceee324eb432765d52f9356f24031bb84452984810b6338f1"} Dec 03 09:14:34 crc kubenswrapper[4576]: I1203 09:14:34.285861 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 09:14:34 crc kubenswrapper[4576]: I1203 09:14:34.321849 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.587761749 podStartE2EDuration="5.32182438s" podCreationTimestamp="2025-12-03 09:14:29 +0000 UTC" firstStartedPulling="2025-12-03 09:14:30.198802034 +0000 UTC m=+2077.584779018" lastFinishedPulling="2025-12-03 09:14:33.932864665 +0000 UTC m=+2081.318841649" observedRunningTime="2025-12-03 09:14:34.312085395 +0000 UTC m=+2081.698062389" watchObservedRunningTime="2025-12-03 09:14:34.32182438 +0000 UTC m=+2081.707801374" Dec 03 09:14:34 crc kubenswrapper[4576]: I1203 09:14:34.327608 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="8e3d72d9-073c-46b3-826c-36b249b45fbf" containerName="glance-httpd" probeResult="failure" output="Get 
\"https://10.217.0.150:9292/healthcheck\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 03 09:14:34 crc kubenswrapper[4576]: I1203 09:14:34.329440 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="8e3d72d9-073c-46b3-826c-36b249b45fbf" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.150:9292/healthcheck\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 03 09:14:38 crc kubenswrapper[4576]: I1203 09:14:38.350440 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-8v6tp" event={"ID":"4fad4d6c-1741-4e92-b5c1-26e939b500df","Type":"ContainerStarted","Data":"9c208cd42b265841f6ca8b227674dc625009260199fdd77dbf2b73c5e73e86f7"} Dec 03 09:14:38 crc kubenswrapper[4576]: I1203 09:14:38.385450 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-8v6tp" podStartSLOduration=2.139256448 podStartE2EDuration="34.385422495s" podCreationTimestamp="2025-12-03 09:14:04 +0000 UTC" firstStartedPulling="2025-12-03 09:14:05.126412575 +0000 UTC m=+2052.512389559" lastFinishedPulling="2025-12-03 09:14:37.372578622 +0000 UTC m=+2084.758555606" observedRunningTime="2025-12-03 09:14:38.38307103 +0000 UTC m=+2085.769048014" watchObservedRunningTime="2025-12-03 09:14:38.385422495 +0000 UTC m=+2085.771399529" Dec 03 09:14:39 crc kubenswrapper[4576]: I1203 09:14:39.692207 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 09:14:39 crc kubenswrapper[4576]: I1203 09:14:39.692607 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 09:14:39 crc kubenswrapper[4576]: I1203 09:14:39.702601 4576 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" Dec 03 09:14:39 crc kubenswrapper[4576]: I1203 09:14:39.703428 4576 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3dfdd9ffa1395b330c78c278dbb37d60033302cad8d06ba1b081f68d7feaaefc"} pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 09:14:39 crc kubenswrapper[4576]: I1203 09:14:39.703512 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" containerID="cri-o://3dfdd9ffa1395b330c78c278dbb37d60033302cad8d06ba1b081f68d7feaaefc" gracePeriod=600 Dec 03 09:14:40 crc kubenswrapper[4576]: E1203 09:14:40.184228 4576 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod60b1bede_26e9_4b5d_b450_9866da685693.slice/crio-conmon-3dfdd9ffa1395b330c78c278dbb37d60033302cad8d06ba1b081f68d7feaaefc.scope\": RecentStats: unable to find data in memory cache]" Dec 03 09:14:40 crc kubenswrapper[4576]: I1203 09:14:40.373965 4576 generic.go:334] "Generic (PLEG): container finished" podID="60b1bede-26e9-4b5d-b450-9866da685693" containerID="3dfdd9ffa1395b330c78c278dbb37d60033302cad8d06ba1b081f68d7feaaefc" exitCode=0 Dec 03 09:14:40 crc kubenswrapper[4576]: I1203 09:14:40.374020 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" event={"ID":"60b1bede-26e9-4b5d-b450-9866da685693","Type":"ContainerDied","Data":"3dfdd9ffa1395b330c78c278dbb37d60033302cad8d06ba1b081f68d7feaaefc"} Dec 03 09:14:40 crc kubenswrapper[4576]: I1203 09:14:40.374056 4576 scope.go:117] "RemoveContainer" containerID="971057ff9d8dd948ea2baec3fb0fe13004ef1bdcb132acd7f101f25e8ebc2c91" Dec 03 09:14:41 crc kubenswrapper[4576]: I1203 09:14:41.389003 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" event={"ID":"60b1bede-26e9-4b5d-b450-9866da685693","Type":"ContainerStarted","Data":"6a946ea24cd0ff2991d3760b68d68a973a0657937c42a4e51fa62809d15fc324"} Dec 03 09:14:41 crc kubenswrapper[4576]: I1203 09:14:41.745284 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-5d9b9454d4-cbqlk" podUID="83ab6db2-7b9e-4161-a064-56fe67986825" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.144:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.144:8443: connect: connection refused" Dec 03 09:14:41 crc kubenswrapper[4576]: I1203 09:14:41.745417 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-5d9b9454d4-cbqlk" Dec 03 09:14:47 crc kubenswrapper[4576]: I1203 09:14:47.329207 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5d9b9454d4-cbqlk" Dec 03 09:14:47 crc kubenswrapper[4576]: I1203 09:14:47.463278 4576 generic.go:334] "Generic (PLEG): container finished" podID="83ab6db2-7b9e-4161-a064-56fe67986825" containerID="de540cc70f90a90805d6634416160cae0d2a1d21d13f2dece12fc7df31fde65a" exitCode=137 Dec 03 09:14:47 crc kubenswrapper[4576]: I1203 09:14:47.463334 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5d9b9454d4-cbqlk" Dec 03 09:14:47 crc kubenswrapper[4576]: I1203 09:14:47.463336 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5d9b9454d4-cbqlk" event={"ID":"83ab6db2-7b9e-4161-a064-56fe67986825","Type":"ContainerDied","Data":"de540cc70f90a90805d6634416160cae0d2a1d21d13f2dece12fc7df31fde65a"} Dec 03 09:14:47 crc kubenswrapper[4576]: I1203 09:14:47.463500 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5d9b9454d4-cbqlk" event={"ID":"83ab6db2-7b9e-4161-a064-56fe67986825","Type":"ContainerDied","Data":"ed9cf7cd116e078e6c51b9dfe988c73ac1d75e4edbaa78fda3c5bb47f277172d"} Dec 03 09:14:47 crc kubenswrapper[4576]: I1203 09:14:47.463555 4576 scope.go:117] "RemoveContainer" containerID="6656ae443ae4c2e89e4664ef221d9536d0cd3bd54b0704ab14a5be9f2234e78e" Dec 03 09:14:47 crc kubenswrapper[4576]: I1203 09:14:47.475421 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/83ab6db2-7b9e-4161-a064-56fe67986825-horizon-tls-certs\") pod \"83ab6db2-7b9e-4161-a064-56fe67986825\" (UID: \"83ab6db2-7b9e-4161-a064-56fe67986825\") " Dec 03 09:14:47 crc kubenswrapper[4576]: I1203 09:14:47.475611 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/83ab6db2-7b9e-4161-a064-56fe67986825-logs\") pod \"83ab6db2-7b9e-4161-a064-56fe67986825\" (UID: \"83ab6db2-7b9e-4161-a064-56fe67986825\") " Dec 03 09:14:47 crc kubenswrapper[4576]: I1203 09:14:47.475650 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/83ab6db2-7b9e-4161-a064-56fe67986825-horizon-secret-key\") pod \"83ab6db2-7b9e-4161-a064-56fe67986825\" (UID: \"83ab6db2-7b9e-4161-a064-56fe67986825\") " Dec 03 09:14:47 crc kubenswrapper[4576]: I1203 09:14:47.475726 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n8rgb\" (UniqueName: \"kubernetes.io/projected/83ab6db2-7b9e-4161-a064-56fe67986825-kube-api-access-n8rgb\") pod \"83ab6db2-7b9e-4161-a064-56fe67986825\" (UID: \"83ab6db2-7b9e-4161-a064-56fe67986825\") " Dec 03 09:14:47 crc kubenswrapper[4576]: I1203 09:14:47.475745 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/83ab6db2-7b9e-4161-a064-56fe67986825-scripts\") pod \"83ab6db2-7b9e-4161-a064-56fe67986825\" (UID: \"83ab6db2-7b9e-4161-a064-56fe67986825\") " Dec 03 09:14:47 crc kubenswrapper[4576]: I1203 09:14:47.475788 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83ab6db2-7b9e-4161-a064-56fe67986825-combined-ca-bundle\") pod \"83ab6db2-7b9e-4161-a064-56fe67986825\" (UID: \"83ab6db2-7b9e-4161-a064-56fe67986825\") " Dec 03 09:14:47 crc kubenswrapper[4576]: I1203 09:14:47.475806 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/83ab6db2-7b9e-4161-a064-56fe67986825-config-data\") pod \"83ab6db2-7b9e-4161-a064-56fe67986825\" (UID: \"83ab6db2-7b9e-4161-a064-56fe67986825\") " Dec 03 09:14:47 crc kubenswrapper[4576]: I1203 09:14:47.477073 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/83ab6db2-7b9e-4161-a064-56fe67986825-logs" 
(OuterVolumeSpecName: "logs") pod "83ab6db2-7b9e-4161-a064-56fe67986825" (UID: "83ab6db2-7b9e-4161-a064-56fe67986825"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:14:47 crc kubenswrapper[4576]: I1203 09:14:47.514917 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83ab6db2-7b9e-4161-a064-56fe67986825-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "83ab6db2-7b9e-4161-a064-56fe67986825" (UID: "83ab6db2-7b9e-4161-a064-56fe67986825"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:14:47 crc kubenswrapper[4576]: I1203 09:14:47.529674 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/83ab6db2-7b9e-4161-a064-56fe67986825-scripts" (OuterVolumeSpecName: "scripts") pod "83ab6db2-7b9e-4161-a064-56fe67986825" (UID: "83ab6db2-7b9e-4161-a064-56fe67986825"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:14:47 crc kubenswrapper[4576]: I1203 09:14:47.532196 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/83ab6db2-7b9e-4161-a064-56fe67986825-config-data" (OuterVolumeSpecName: "config-data") pod "83ab6db2-7b9e-4161-a064-56fe67986825" (UID: "83ab6db2-7b9e-4161-a064-56fe67986825"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:14:47 crc kubenswrapper[4576]: I1203 09:14:47.536712 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83ab6db2-7b9e-4161-a064-56fe67986825-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "83ab6db2-7b9e-4161-a064-56fe67986825" (UID: "83ab6db2-7b9e-4161-a064-56fe67986825"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:14:47 crc kubenswrapper[4576]: I1203 09:14:47.537283 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83ab6db2-7b9e-4161-a064-56fe67986825-kube-api-access-n8rgb" (OuterVolumeSpecName: "kube-api-access-n8rgb") pod "83ab6db2-7b9e-4161-a064-56fe67986825" (UID: "83ab6db2-7b9e-4161-a064-56fe67986825"). InnerVolumeSpecName "kube-api-access-n8rgb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:14:47 crc kubenswrapper[4576]: I1203 09:14:47.555011 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83ab6db2-7b9e-4161-a064-56fe67986825-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "83ab6db2-7b9e-4161-a064-56fe67986825" (UID: "83ab6db2-7b9e-4161-a064-56fe67986825"). InnerVolumeSpecName "horizon-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:14:47 crc kubenswrapper[4576]: I1203 09:14:47.578146 4576 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/83ab6db2-7b9e-4161-a064-56fe67986825-logs\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:47 crc kubenswrapper[4576]: I1203 09:14:47.578217 4576 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/83ab6db2-7b9e-4161-a064-56fe67986825-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:47 crc kubenswrapper[4576]: I1203 09:14:47.578233 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n8rgb\" (UniqueName: \"kubernetes.io/projected/83ab6db2-7b9e-4161-a064-56fe67986825-kube-api-access-n8rgb\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:47 crc kubenswrapper[4576]: I1203 09:14:47.578245 4576 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/83ab6db2-7b9e-4161-a064-56fe67986825-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:47 crc kubenswrapper[4576]: I1203 09:14:47.578258 4576 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83ab6db2-7b9e-4161-a064-56fe67986825-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:47 crc kubenswrapper[4576]: I1203 09:14:47.578286 4576 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/83ab6db2-7b9e-4161-a064-56fe67986825-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:47 crc kubenswrapper[4576]: I1203 09:14:47.578295 4576 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/83ab6db2-7b9e-4161-a064-56fe67986825-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:47 crc kubenswrapper[4576]: I1203 09:14:47.698577 4576 scope.go:117] "RemoveContainer" containerID="de540cc70f90a90805d6634416160cae0d2a1d21d13f2dece12fc7df31fde65a" Dec 03 09:14:47 crc kubenswrapper[4576]: I1203 09:14:47.723402 4576 scope.go:117] "RemoveContainer" containerID="6656ae443ae4c2e89e4664ef221d9536d0cd3bd54b0704ab14a5be9f2234e78e" Dec 03 09:14:47 crc kubenswrapper[4576]: E1203 09:14:47.724083 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6656ae443ae4c2e89e4664ef221d9536d0cd3bd54b0704ab14a5be9f2234e78e\": container with ID starting with 6656ae443ae4c2e89e4664ef221d9536d0cd3bd54b0704ab14a5be9f2234e78e not found: ID does not exist" containerID="6656ae443ae4c2e89e4664ef221d9536d0cd3bd54b0704ab14a5be9f2234e78e" Dec 03 09:14:47 crc kubenswrapper[4576]: I1203 09:14:47.724224 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6656ae443ae4c2e89e4664ef221d9536d0cd3bd54b0704ab14a5be9f2234e78e"} err="failed to get container status \"6656ae443ae4c2e89e4664ef221d9536d0cd3bd54b0704ab14a5be9f2234e78e\": rpc error: code = NotFound desc = could not find container \"6656ae443ae4c2e89e4664ef221d9536d0cd3bd54b0704ab14a5be9f2234e78e\": container with ID starting with 6656ae443ae4c2e89e4664ef221d9536d0cd3bd54b0704ab14a5be9f2234e78e not found: ID does not exist" Dec 03 09:14:47 crc kubenswrapper[4576]: I1203 09:14:47.724378 4576 scope.go:117] "RemoveContainer" containerID="de540cc70f90a90805d6634416160cae0d2a1d21d13f2dece12fc7df31fde65a" Dec 03 09:14:47 crc kubenswrapper[4576]: 
E1203 09:14:47.724779 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de540cc70f90a90805d6634416160cae0d2a1d21d13f2dece12fc7df31fde65a\": container with ID starting with de540cc70f90a90805d6634416160cae0d2a1d21d13f2dece12fc7df31fde65a not found: ID does not exist" containerID="de540cc70f90a90805d6634416160cae0d2a1d21d13f2dece12fc7df31fde65a" Dec 03 09:14:47 crc kubenswrapper[4576]: I1203 09:14:47.724819 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de540cc70f90a90805d6634416160cae0d2a1d21d13f2dece12fc7df31fde65a"} err="failed to get container status \"de540cc70f90a90805d6634416160cae0d2a1d21d13f2dece12fc7df31fde65a\": rpc error: code = NotFound desc = could not find container \"de540cc70f90a90805d6634416160cae0d2a1d21d13f2dece12fc7df31fde65a\": container with ID starting with de540cc70f90a90805d6634416160cae0d2a1d21d13f2dece12fc7df31fde65a not found: ID does not exist" Dec 03 09:14:47 crc kubenswrapper[4576]: I1203 09:14:47.784791 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5d9b9454d4-cbqlk"] Dec 03 09:14:47 crc kubenswrapper[4576]: I1203 09:14:47.794913 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-5d9b9454d4-cbqlk"] Dec 03 09:14:49 crc kubenswrapper[4576]: I1203 09:14:49.488270 4576 generic.go:334] "Generic (PLEG): container finished" podID="4fad4d6c-1741-4e92-b5c1-26e939b500df" containerID="9c208cd42b265841f6ca8b227674dc625009260199fdd77dbf2b73c5e73e86f7" exitCode=0 Dec 03 09:14:49 crc kubenswrapper[4576]: I1203 09:14:49.488389 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-8v6tp" event={"ID":"4fad4d6c-1741-4e92-b5c1-26e939b500df","Type":"ContainerDied","Data":"9c208cd42b265841f6ca8b227674dc625009260199fdd77dbf2b73c5e73e86f7"} Dec 03 09:14:49 crc kubenswrapper[4576]: I1203 09:14:49.689004 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83ab6db2-7b9e-4161-a064-56fe67986825" path="/var/lib/kubelet/pods/83ab6db2-7b9e-4161-a064-56fe67986825/volumes" Dec 03 09:14:50 crc kubenswrapper[4576]: I1203 09:14:50.830376 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-8v6tp" Dec 03 09:14:50 crc kubenswrapper[4576]: I1203 09:14:50.942334 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fad4d6c-1741-4e92-b5c1-26e939b500df-config-data\") pod \"4fad4d6c-1741-4e92-b5c1-26e939b500df\" (UID: \"4fad4d6c-1741-4e92-b5c1-26e939b500df\") " Dec 03 09:14:50 crc kubenswrapper[4576]: I1203 09:14:50.942557 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4fad4d6c-1741-4e92-b5c1-26e939b500df-scripts\") pod \"4fad4d6c-1741-4e92-b5c1-26e939b500df\" (UID: \"4fad4d6c-1741-4e92-b5c1-26e939b500df\") " Dec 03 09:14:50 crc kubenswrapper[4576]: I1203 09:14:50.942876 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fad4d6c-1741-4e92-b5c1-26e939b500df-combined-ca-bundle\") pod \"4fad4d6c-1741-4e92-b5c1-26e939b500df\" (UID: \"4fad4d6c-1741-4e92-b5c1-26e939b500df\") " Dec 03 09:14:50 crc kubenswrapper[4576]: I1203 09:14:50.943536 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nnxdv\" (UniqueName: \"kubernetes.io/projected/4fad4d6c-1741-4e92-b5c1-26e939b500df-kube-api-access-nnxdv\") pod \"4fad4d6c-1741-4e92-b5c1-26e939b500df\" (UID: \"4fad4d6c-1741-4e92-b5c1-26e939b500df\") " Dec 03 09:14:50 crc kubenswrapper[4576]: I1203 09:14:50.948548 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fad4d6c-1741-4e92-b5c1-26e939b500df-scripts" (OuterVolumeSpecName: "scripts") pod "4fad4d6c-1741-4e92-b5c1-26e939b500df" (UID: "4fad4d6c-1741-4e92-b5c1-26e939b500df"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:14:50 crc kubenswrapper[4576]: I1203 09:14:50.949933 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4fad4d6c-1741-4e92-b5c1-26e939b500df-kube-api-access-nnxdv" (OuterVolumeSpecName: "kube-api-access-nnxdv") pod "4fad4d6c-1741-4e92-b5c1-26e939b500df" (UID: "4fad4d6c-1741-4e92-b5c1-26e939b500df"). InnerVolumeSpecName "kube-api-access-nnxdv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:14:50 crc kubenswrapper[4576]: I1203 09:14:50.980719 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fad4d6c-1741-4e92-b5c1-26e939b500df-config-data" (OuterVolumeSpecName: "config-data") pod "4fad4d6c-1741-4e92-b5c1-26e939b500df" (UID: "4fad4d6c-1741-4e92-b5c1-26e939b500df"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:14:50 crc kubenswrapper[4576]: I1203 09:14:50.980745 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fad4d6c-1741-4e92-b5c1-26e939b500df-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4fad4d6c-1741-4e92-b5c1-26e939b500df" (UID: "4fad4d6c-1741-4e92-b5c1-26e939b500df"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:14:51 crc kubenswrapper[4576]: I1203 09:14:51.046031 4576 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fad4d6c-1741-4e92-b5c1-26e939b500df-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:51 crc kubenswrapper[4576]: I1203 09:14:51.046085 4576 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4fad4d6c-1741-4e92-b5c1-26e939b500df-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:51 crc kubenswrapper[4576]: I1203 09:14:51.046097 4576 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fad4d6c-1741-4e92-b5c1-26e939b500df-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:51 crc kubenswrapper[4576]: I1203 09:14:51.046111 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nnxdv\" (UniqueName: \"kubernetes.io/projected/4fad4d6c-1741-4e92-b5c1-26e939b500df-kube-api-access-nnxdv\") on node \"crc\" DevicePath \"\"" Dec 03 09:14:51 crc kubenswrapper[4576]: I1203 09:14:51.587488 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-8v6tp" event={"ID":"4fad4d6c-1741-4e92-b5c1-26e939b500df","Type":"ContainerDied","Data":"f5c68908e58da66a1b029340ce7b749a0f10d6d23684360fa34a8dd0167ede3d"} Dec 03 09:14:51 crc kubenswrapper[4576]: I1203 09:14:51.587785 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f5c68908e58da66a1b029340ce7b749a0f10d6d23684360fa34a8dd0167ede3d" Dec 03 09:14:51 crc kubenswrapper[4576]: I1203 09:14:51.587689 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-8v6tp" Dec 03 09:14:51 crc kubenswrapper[4576]: I1203 09:14:51.674215 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 03 09:14:51 crc kubenswrapper[4576]: E1203 09:14:51.674678 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83ab6db2-7b9e-4161-a064-56fe67986825" containerName="horizon" Dec 03 09:14:51 crc kubenswrapper[4576]: I1203 09:14:51.674693 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="83ab6db2-7b9e-4161-a064-56fe67986825" containerName="horizon" Dec 03 09:14:51 crc kubenswrapper[4576]: E1203 09:14:51.674717 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83ab6db2-7b9e-4161-a064-56fe67986825" containerName="horizon-log" Dec 03 09:14:51 crc kubenswrapper[4576]: I1203 09:14:51.674723 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="83ab6db2-7b9e-4161-a064-56fe67986825" containerName="horizon-log" Dec 03 09:14:51 crc kubenswrapper[4576]: E1203 09:14:51.674737 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fad4d6c-1741-4e92-b5c1-26e939b500df" containerName="nova-cell0-conductor-db-sync" Dec 03 09:14:51 crc kubenswrapper[4576]: I1203 09:14:51.674745 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fad4d6c-1741-4e92-b5c1-26e939b500df" containerName="nova-cell0-conductor-db-sync" Dec 03 09:14:51 crc kubenswrapper[4576]: I1203 09:14:51.674980 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="83ab6db2-7b9e-4161-a064-56fe67986825" containerName="horizon" Dec 03 09:14:51 crc kubenswrapper[4576]: I1203 09:14:51.674998 4576 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="4fad4d6c-1741-4e92-b5c1-26e939b500df" containerName="nova-cell0-conductor-db-sync" Dec 03 09:14:51 crc kubenswrapper[4576]: I1203 09:14:51.675015 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="83ab6db2-7b9e-4161-a064-56fe67986825" containerName="horizon-log" Dec 03 09:14:51 crc kubenswrapper[4576]: I1203 09:14:51.675653 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 03 09:14:51 crc kubenswrapper[4576]: I1203 09:14:51.679077 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 03 09:14:51 crc kubenswrapper[4576]: I1203 09:14:51.679585 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-knrl7" Dec 03 09:14:51 crc kubenswrapper[4576]: I1203 09:14:51.702001 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 03 09:14:51 crc kubenswrapper[4576]: I1203 09:14:51.878303 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ttdx6\" (UniqueName: \"kubernetes.io/projected/2ce21985-e2a9-48a8-bdca-ad4dc248ff98-kube-api-access-ttdx6\") pod \"nova-cell0-conductor-0\" (UID: \"2ce21985-e2a9-48a8-bdca-ad4dc248ff98\") " pod="openstack/nova-cell0-conductor-0" Dec 03 09:14:51 crc kubenswrapper[4576]: I1203 09:14:51.878432 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ce21985-e2a9-48a8-bdca-ad4dc248ff98-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"2ce21985-e2a9-48a8-bdca-ad4dc248ff98\") " pod="openstack/nova-cell0-conductor-0" Dec 03 09:14:51 crc kubenswrapper[4576]: I1203 09:14:51.878492 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ce21985-e2a9-48a8-bdca-ad4dc248ff98-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"2ce21985-e2a9-48a8-bdca-ad4dc248ff98\") " pod="openstack/nova-cell0-conductor-0" Dec 03 09:14:51 crc kubenswrapper[4576]: I1203 09:14:51.980241 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ttdx6\" (UniqueName: \"kubernetes.io/projected/2ce21985-e2a9-48a8-bdca-ad4dc248ff98-kube-api-access-ttdx6\") pod \"nova-cell0-conductor-0\" (UID: \"2ce21985-e2a9-48a8-bdca-ad4dc248ff98\") " pod="openstack/nova-cell0-conductor-0" Dec 03 09:14:51 crc kubenswrapper[4576]: I1203 09:14:51.980322 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ce21985-e2a9-48a8-bdca-ad4dc248ff98-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"2ce21985-e2a9-48a8-bdca-ad4dc248ff98\") " pod="openstack/nova-cell0-conductor-0" Dec 03 09:14:51 crc kubenswrapper[4576]: I1203 09:14:51.980372 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ce21985-e2a9-48a8-bdca-ad4dc248ff98-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"2ce21985-e2a9-48a8-bdca-ad4dc248ff98\") " pod="openstack/nova-cell0-conductor-0" Dec 03 09:14:51 crc kubenswrapper[4576]: I1203 09:14:51.985971 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ce21985-e2a9-48a8-bdca-ad4dc248ff98-config-data\") 
pod \"nova-cell0-conductor-0\" (UID: \"2ce21985-e2a9-48a8-bdca-ad4dc248ff98\") " pod="openstack/nova-cell0-conductor-0" Dec 03 09:14:51 crc kubenswrapper[4576]: I1203 09:14:51.988325 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ce21985-e2a9-48a8-bdca-ad4dc248ff98-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"2ce21985-e2a9-48a8-bdca-ad4dc248ff98\") " pod="openstack/nova-cell0-conductor-0" Dec 03 09:14:52 crc kubenswrapper[4576]: I1203 09:14:52.009996 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ttdx6\" (UniqueName: \"kubernetes.io/projected/2ce21985-e2a9-48a8-bdca-ad4dc248ff98-kube-api-access-ttdx6\") pod \"nova-cell0-conductor-0\" (UID: \"2ce21985-e2a9-48a8-bdca-ad4dc248ff98\") " pod="openstack/nova-cell0-conductor-0" Dec 03 09:14:52 crc kubenswrapper[4576]: I1203 09:14:52.301368 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 03 09:14:52 crc kubenswrapper[4576]: I1203 09:14:52.791460 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 03 09:14:52 crc kubenswrapper[4576]: W1203 09:14:52.799700 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2ce21985_e2a9_48a8_bdca_ad4dc248ff98.slice/crio-1048666dd0f3ba53416e5ac06e9845bd6ecbb9bd4880f755205d5f04157c28a8 WatchSource:0}: Error finding container 1048666dd0f3ba53416e5ac06e9845bd6ecbb9bd4880f755205d5f04157c28a8: Status 404 returned error can't find the container with id 1048666dd0f3ba53416e5ac06e9845bd6ecbb9bd4880f755205d5f04157c28a8 Dec 03 09:14:53 crc kubenswrapper[4576]: I1203 09:14:53.624042 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"2ce21985-e2a9-48a8-bdca-ad4dc248ff98","Type":"ContainerStarted","Data":"9abc5e081d1d45a03e8a4cc4807b8b1054aa758943ebdeca396ed8db14fb9267"} Dec 03 09:14:53 crc kubenswrapper[4576]: I1203 09:14:53.624422 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 03 09:14:53 crc kubenswrapper[4576]: I1203 09:14:53.624438 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"2ce21985-e2a9-48a8-bdca-ad4dc248ff98","Type":"ContainerStarted","Data":"1048666dd0f3ba53416e5ac06e9845bd6ecbb9bd4880f755205d5f04157c28a8"} Dec 03 09:14:53 crc kubenswrapper[4576]: I1203 09:14:53.650706 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.650687757 podStartE2EDuration="2.650687757s" podCreationTimestamp="2025-12-03 09:14:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:14:53.644006164 +0000 UTC m=+2101.029983148" watchObservedRunningTime="2025-12-03 09:14:53.650687757 +0000 UTC m=+2101.036664741" Dec 03 09:14:57 crc kubenswrapper[4576]: I1203 09:14:57.328436 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Dec 03 09:14:57 crc kubenswrapper[4576]: I1203 09:14:57.845399 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-fm89l"] Dec 03 09:14:57 crc kubenswrapper[4576]: E1203 09:14:57.846154 4576 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="83ab6db2-7b9e-4161-a064-56fe67986825" containerName="horizon" Dec 03 09:14:57 crc kubenswrapper[4576]: I1203 09:14:57.846171 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="83ab6db2-7b9e-4161-a064-56fe67986825" containerName="horizon" Dec 03 09:14:57 crc kubenswrapper[4576]: I1203 09:14:57.846363 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="83ab6db2-7b9e-4161-a064-56fe67986825" containerName="horizon" Dec 03 09:14:57 crc kubenswrapper[4576]: I1203 09:14:57.846945 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-fm89l" Dec 03 09:14:57 crc kubenswrapper[4576]: I1203 09:14:57.851015 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Dec 03 09:14:57 crc kubenswrapper[4576]: I1203 09:14:57.855614 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Dec 03 09:14:57 crc kubenswrapper[4576]: I1203 09:14:57.909184 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-fm89l"] Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.009148 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-fm89l\" (UID: \"66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24\") " pod="openstack/nova-cell0-cell-mapping-fm89l" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.009468 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24-scripts\") pod \"nova-cell0-cell-mapping-fm89l\" (UID: \"66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24\") " pod="openstack/nova-cell0-cell-mapping-fm89l" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.009611 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24-config-data\") pod \"nova-cell0-cell-mapping-fm89l\" (UID: \"66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24\") " pod="openstack/nova-cell0-cell-mapping-fm89l" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.009669 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wlv2h\" (UniqueName: \"kubernetes.io/projected/66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24-kube-api-access-wlv2h\") pod \"nova-cell0-cell-mapping-fm89l\" (UID: \"66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24\") " pod="openstack/nova-cell0-cell-mapping-fm89l" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.138295 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-fm89l\" (UID: \"66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24\") " pod="openstack/nova-cell0-cell-mapping-fm89l" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.138770 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24-scripts\") pod \"nova-cell0-cell-mapping-fm89l\" (UID: \"66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24\") " 
pod="openstack/nova-cell0-cell-mapping-fm89l" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.138827 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24-config-data\") pod \"nova-cell0-cell-mapping-fm89l\" (UID: \"66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24\") " pod="openstack/nova-cell0-cell-mapping-fm89l" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.138859 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wlv2h\" (UniqueName: \"kubernetes.io/projected/66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24-kube-api-access-wlv2h\") pod \"nova-cell0-cell-mapping-fm89l\" (UID: \"66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24\") " pod="openstack/nova-cell0-cell-mapping-fm89l" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.165874 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24-scripts\") pod \"nova-cell0-cell-mapping-fm89l\" (UID: \"66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24\") " pod="openstack/nova-cell0-cell-mapping-fm89l" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.177221 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-fm89l\" (UID: \"66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24\") " pod="openstack/nova-cell0-cell-mapping-fm89l" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.180803 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wlv2h\" (UniqueName: \"kubernetes.io/projected/66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24-kube-api-access-wlv2h\") pod \"nova-cell0-cell-mapping-fm89l\" (UID: \"66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24\") " pod="openstack/nova-cell0-cell-mapping-fm89l" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.191949 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.200195 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.204995 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.249333 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24-config-data\") pod \"nova-cell0-cell-mapping-fm89l\" (UID: \"66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24\") " pod="openstack/nova-cell0-cell-mapping-fm89l" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.301088 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.342328 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.344878 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.415150 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/386046da-1085-4aca-bc7d-a9960318c1cb-config-data\") pod \"nova-scheduler-0\" (UID: \"386046da-1085-4aca-bc7d-a9960318c1cb\") " pod="openstack/nova-scheduler-0" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.415229 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vptw\" (UniqueName: \"kubernetes.io/projected/386046da-1085-4aca-bc7d-a9960318c1cb-kube-api-access-9vptw\") pod \"nova-scheduler-0\" (UID: \"386046da-1085-4aca-bc7d-a9960318c1cb\") " pod="openstack/nova-scheduler-0" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.415304 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/386046da-1085-4aca-bc7d-a9960318c1cb-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"386046da-1085-4aca-bc7d-a9960318c1cb\") " pod="openstack/nova-scheduler-0" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.417655 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.421702 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.456386 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.461181 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.471639 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.512537 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.516853 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4bd60085-c02a-48a1-92ef-d315753b63d3-logs\") pod \"nova-metadata-0\" (UID: \"4bd60085-c02a-48a1-92ef-d315753b63d3\") " pod="openstack/nova-metadata-0" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.516905 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7qnb\" (UniqueName: \"kubernetes.io/projected/4bd60085-c02a-48a1-92ef-d315753b63d3-kube-api-access-h7qnb\") pod \"nova-metadata-0\" (UID: \"4bd60085-c02a-48a1-92ef-d315753b63d3\") " pod="openstack/nova-metadata-0" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.516951 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/386046da-1085-4aca-bc7d-a9960318c1cb-config-data\") pod \"nova-scheduler-0\" (UID: \"386046da-1085-4aca-bc7d-a9960318c1cb\") " pod="openstack/nova-scheduler-0" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.516975 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bd60085-c02a-48a1-92ef-d315753b63d3-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4bd60085-c02a-48a1-92ef-d315753b63d3\") " pod="openstack/nova-metadata-0" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.517002 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vptw\" (UniqueName: \"kubernetes.io/projected/386046da-1085-4aca-bc7d-a9960318c1cb-kube-api-access-9vptw\") pod \"nova-scheduler-0\" (UID: \"386046da-1085-4aca-bc7d-a9960318c1cb\") " pod="openstack/nova-scheduler-0" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.517038 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4bd60085-c02a-48a1-92ef-d315753b63d3-config-data\") pod \"nova-metadata-0\" (UID: \"4bd60085-c02a-48a1-92ef-d315753b63d3\") " pod="openstack/nova-metadata-0" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.517077 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/386046da-1085-4aca-bc7d-a9960318c1cb-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"386046da-1085-4aca-bc7d-a9960318c1cb\") " pod="openstack/nova-scheduler-0" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.525980 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-fm89l" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.526556 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/386046da-1085-4aca-bc7d-a9960318c1cb-config-data\") pod \"nova-scheduler-0\" (UID: \"386046da-1085-4aca-bc7d-a9960318c1cb\") " pod="openstack/nova-scheduler-0" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.542242 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/386046da-1085-4aca-bc7d-a9960318c1cb-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"386046da-1085-4aca-bc7d-a9960318c1cb\") " pod="openstack/nova-scheduler-0" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.589220 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.593644 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.603301 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.604336 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vptw\" (UniqueName: \"kubernetes.io/projected/386046da-1085-4aca-bc7d-a9960318c1cb-kube-api-access-9vptw\") pod \"nova-scheduler-0\" (UID: \"386046da-1085-4aca-bc7d-a9960318c1cb\") " pod="openstack/nova-scheduler-0" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.620660 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4bd60085-c02a-48a1-92ef-d315753b63d3-config-data\") pod \"nova-metadata-0\" (UID: \"4bd60085-c02a-48a1-92ef-d315753b63d3\") " pod="openstack/nova-metadata-0" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.620932 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d49b775-1c3c-430a-9d73-744e4186631f-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"8d49b775-1c3c-430a-9d73-744e4186631f\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.621156 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fm5sx\" (UniqueName: \"kubernetes.io/projected/8d49b775-1c3c-430a-9d73-744e4186631f-kube-api-access-fm5sx\") pod \"nova-cell1-novncproxy-0\" (UID: \"8d49b775-1c3c-430a-9d73-744e4186631f\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.621325 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d49b775-1c3c-430a-9d73-744e4186631f-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"8d49b775-1c3c-430a-9d73-744e4186631f\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.621523 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4bd60085-c02a-48a1-92ef-d315753b63d3-logs\") pod \"nova-metadata-0\" (UID: \"4bd60085-c02a-48a1-92ef-d315753b63d3\") " pod="openstack/nova-metadata-0" Dec 03 09:14:58 crc 
kubenswrapper[4576]: I1203 09:14:58.621669 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7qnb\" (UniqueName: \"kubernetes.io/projected/4bd60085-c02a-48a1-92ef-d315753b63d3-kube-api-access-h7qnb\") pod \"nova-metadata-0\" (UID: \"4bd60085-c02a-48a1-92ef-d315753b63d3\") " pod="openstack/nova-metadata-0" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.621812 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bd60085-c02a-48a1-92ef-d315753b63d3-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4bd60085-c02a-48a1-92ef-d315753b63d3\") " pod="openstack/nova-metadata-0" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.625116 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4bd60085-c02a-48a1-92ef-d315753b63d3-logs\") pod \"nova-metadata-0\" (UID: \"4bd60085-c02a-48a1-92ef-d315753b63d3\") " pod="openstack/nova-metadata-0" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.625480 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.627153 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bd60085-c02a-48a1-92ef-d315753b63d3-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4bd60085-c02a-48a1-92ef-d315753b63d3\") " pod="openstack/nova-metadata-0" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.644093 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4bd60085-c02a-48a1-92ef-d315753b63d3-config-data\") pod \"nova-metadata-0\" (UID: \"4bd60085-c02a-48a1-92ef-d315753b63d3\") " pod="openstack/nova-metadata-0" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.673240 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-rzzhl"] Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.675687 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-bccf8f775-rzzhl" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.685496 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7qnb\" (UniqueName: \"kubernetes.io/projected/4bd60085-c02a-48a1-92ef-d315753b63d3-kube-api-access-h7qnb\") pod \"nova-metadata-0\" (UID: \"4bd60085-c02a-48a1-92ef-d315753b63d3\") " pod="openstack/nova-metadata-0" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.708520 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-rzzhl"] Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.724249 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0aba50e0-b5ee-461b-837c-137175381621-config-data\") pod \"nova-api-0\" (UID: \"0aba50e0-b5ee-461b-837c-137175381621\") " pod="openstack/nova-api-0" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.724489 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0aba50e0-b5ee-461b-837c-137175381621-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0aba50e0-b5ee-461b-837c-137175381621\") " pod="openstack/nova-api-0" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.724637 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d49b775-1c3c-430a-9d73-744e4186631f-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"8d49b775-1c3c-430a-9d73-744e4186631f\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.724717 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0aba50e0-b5ee-461b-837c-137175381621-logs\") pod \"nova-api-0\" (UID: \"0aba50e0-b5ee-461b-837c-137175381621\") " pod="openstack/nova-api-0" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.724798 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fm5sx\" (UniqueName: \"kubernetes.io/projected/8d49b775-1c3c-430a-9d73-744e4186631f-kube-api-access-fm5sx\") pod \"nova-cell1-novncproxy-0\" (UID: \"8d49b775-1c3c-430a-9d73-744e4186631f\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.724883 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hk2f2\" (UniqueName: \"kubernetes.io/projected/0aba50e0-b5ee-461b-837c-137175381621-kube-api-access-hk2f2\") pod \"nova-api-0\" (UID: \"0aba50e0-b5ee-461b-837c-137175381621\") " pod="openstack/nova-api-0" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.724958 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d49b775-1c3c-430a-9d73-744e4186631f-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"8d49b775-1c3c-430a-9d73-744e4186631f\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.724960 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.734078 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d49b775-1c3c-430a-9d73-744e4186631f-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"8d49b775-1c3c-430a-9d73-744e4186631f\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.748027 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d49b775-1c3c-430a-9d73-744e4186631f-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"8d49b775-1c3c-430a-9d73-744e4186631f\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.758901 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fm5sx\" (UniqueName: \"kubernetes.io/projected/8d49b775-1c3c-430a-9d73-744e4186631f-kube-api-access-fm5sx\") pod \"nova-cell1-novncproxy-0\" (UID: \"8d49b775-1c3c-430a-9d73-744e4186631f\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.763058 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 09:14:58 crc kubenswrapper[4576]: I1203 09:14:58.794233 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 03 09:14:59 crc kubenswrapper[4576]: I1203 09:14:59.382583 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0aba50e0-b5ee-461b-837c-137175381621-config-data\") pod \"nova-api-0\" (UID: \"0aba50e0-b5ee-461b-837c-137175381621\") " pod="openstack/nova-api-0" Dec 03 09:14:59 crc kubenswrapper[4576]: I1203 09:14:59.382627 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0aba50e0-b5ee-461b-837c-137175381621-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0aba50e0-b5ee-461b-837c-137175381621\") " pod="openstack/nova-api-0" Dec 03 09:14:59 crc kubenswrapper[4576]: I1203 09:14:59.382650 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7b4f90af-69a8-42f0-a715-ea40baaec5c3-ovsdbserver-sb\") pod \"dnsmasq-dns-bccf8f775-rzzhl\" (UID: \"7b4f90af-69a8-42f0-a715-ea40baaec5c3\") " pod="openstack/dnsmasq-dns-bccf8f775-rzzhl" Dec 03 09:14:59 crc kubenswrapper[4576]: I1203 09:14:59.382729 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7b4f90af-69a8-42f0-a715-ea40baaec5c3-ovsdbserver-nb\") pod \"dnsmasq-dns-bccf8f775-rzzhl\" (UID: \"7b4f90af-69a8-42f0-a715-ea40baaec5c3\") " pod="openstack/dnsmasq-dns-bccf8f775-rzzhl" Dec 03 09:14:59 crc kubenswrapper[4576]: I1203 09:14:59.382748 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0aba50e0-b5ee-461b-837c-137175381621-logs\") pod \"nova-api-0\" (UID: \"0aba50e0-b5ee-461b-837c-137175381621\") " pod="openstack/nova-api-0" Dec 03 09:14:59 crc kubenswrapper[4576]: I1203 09:14:59.382779 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hk2f2\" 
(UniqueName: \"kubernetes.io/projected/0aba50e0-b5ee-461b-837c-137175381621-kube-api-access-hk2f2\") pod \"nova-api-0\" (UID: \"0aba50e0-b5ee-461b-837c-137175381621\") " pod="openstack/nova-api-0" Dec 03 09:14:59 crc kubenswrapper[4576]: I1203 09:14:59.382830 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwbx8\" (UniqueName: \"kubernetes.io/projected/7b4f90af-69a8-42f0-a715-ea40baaec5c3-kube-api-access-fwbx8\") pod \"dnsmasq-dns-bccf8f775-rzzhl\" (UID: \"7b4f90af-69a8-42f0-a715-ea40baaec5c3\") " pod="openstack/dnsmasq-dns-bccf8f775-rzzhl" Dec 03 09:14:59 crc kubenswrapper[4576]: I1203 09:14:59.382874 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7b4f90af-69a8-42f0-a715-ea40baaec5c3-dns-svc\") pod \"dnsmasq-dns-bccf8f775-rzzhl\" (UID: \"7b4f90af-69a8-42f0-a715-ea40baaec5c3\") " pod="openstack/dnsmasq-dns-bccf8f775-rzzhl" Dec 03 09:14:59 crc kubenswrapper[4576]: I1203 09:14:59.382917 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7b4f90af-69a8-42f0-a715-ea40baaec5c3-dns-swift-storage-0\") pod \"dnsmasq-dns-bccf8f775-rzzhl\" (UID: \"7b4f90af-69a8-42f0-a715-ea40baaec5c3\") " pod="openstack/dnsmasq-dns-bccf8f775-rzzhl" Dec 03 09:14:59 crc kubenswrapper[4576]: I1203 09:14:59.383585 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b4f90af-69a8-42f0-a715-ea40baaec5c3-config\") pod \"dnsmasq-dns-bccf8f775-rzzhl\" (UID: \"7b4f90af-69a8-42f0-a715-ea40baaec5c3\") " pod="openstack/dnsmasq-dns-bccf8f775-rzzhl" Dec 03 09:14:59 crc kubenswrapper[4576]: I1203 09:14:59.387684 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0aba50e0-b5ee-461b-837c-137175381621-logs\") pod \"nova-api-0\" (UID: \"0aba50e0-b5ee-461b-837c-137175381621\") " pod="openstack/nova-api-0" Dec 03 09:14:59 crc kubenswrapper[4576]: I1203 09:14:59.392544 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0aba50e0-b5ee-461b-837c-137175381621-config-data\") pod \"nova-api-0\" (UID: \"0aba50e0-b5ee-461b-837c-137175381621\") " pod="openstack/nova-api-0" Dec 03 09:14:59 crc kubenswrapper[4576]: I1203 09:14:59.400279 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0aba50e0-b5ee-461b-837c-137175381621-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0aba50e0-b5ee-461b-837c-137175381621\") " pod="openstack/nova-api-0" Dec 03 09:14:59 crc kubenswrapper[4576]: I1203 09:14:59.424093 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hk2f2\" (UniqueName: \"kubernetes.io/projected/0aba50e0-b5ee-461b-837c-137175381621-kube-api-access-hk2f2\") pod \"nova-api-0\" (UID: \"0aba50e0-b5ee-461b-837c-137175381621\") " pod="openstack/nova-api-0" Dec 03 09:14:59 crc kubenswrapper[4576]: I1203 09:14:59.489267 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7b4f90af-69a8-42f0-a715-ea40baaec5c3-ovsdbserver-nb\") pod \"dnsmasq-dns-bccf8f775-rzzhl\" (UID: \"7b4f90af-69a8-42f0-a715-ea40baaec5c3\") " 
pod="openstack/dnsmasq-dns-bccf8f775-rzzhl" Dec 03 09:14:59 crc kubenswrapper[4576]: I1203 09:14:59.489341 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwbx8\" (UniqueName: \"kubernetes.io/projected/7b4f90af-69a8-42f0-a715-ea40baaec5c3-kube-api-access-fwbx8\") pod \"dnsmasq-dns-bccf8f775-rzzhl\" (UID: \"7b4f90af-69a8-42f0-a715-ea40baaec5c3\") " pod="openstack/dnsmasq-dns-bccf8f775-rzzhl" Dec 03 09:14:59 crc kubenswrapper[4576]: I1203 09:14:59.489373 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7b4f90af-69a8-42f0-a715-ea40baaec5c3-dns-svc\") pod \"dnsmasq-dns-bccf8f775-rzzhl\" (UID: \"7b4f90af-69a8-42f0-a715-ea40baaec5c3\") " pod="openstack/dnsmasq-dns-bccf8f775-rzzhl" Dec 03 09:14:59 crc kubenswrapper[4576]: I1203 09:14:59.489403 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7b4f90af-69a8-42f0-a715-ea40baaec5c3-dns-swift-storage-0\") pod \"dnsmasq-dns-bccf8f775-rzzhl\" (UID: \"7b4f90af-69a8-42f0-a715-ea40baaec5c3\") " pod="openstack/dnsmasq-dns-bccf8f775-rzzhl" Dec 03 09:14:59 crc kubenswrapper[4576]: I1203 09:14:59.489465 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b4f90af-69a8-42f0-a715-ea40baaec5c3-config\") pod \"dnsmasq-dns-bccf8f775-rzzhl\" (UID: \"7b4f90af-69a8-42f0-a715-ea40baaec5c3\") " pod="openstack/dnsmasq-dns-bccf8f775-rzzhl" Dec 03 09:14:59 crc kubenswrapper[4576]: I1203 09:14:59.489516 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7b4f90af-69a8-42f0-a715-ea40baaec5c3-ovsdbserver-sb\") pod \"dnsmasq-dns-bccf8f775-rzzhl\" (UID: \"7b4f90af-69a8-42f0-a715-ea40baaec5c3\") " pod="openstack/dnsmasq-dns-bccf8f775-rzzhl" Dec 03 09:14:59 crc kubenswrapper[4576]: I1203 09:14:59.490470 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7b4f90af-69a8-42f0-a715-ea40baaec5c3-ovsdbserver-sb\") pod \"dnsmasq-dns-bccf8f775-rzzhl\" (UID: \"7b4f90af-69a8-42f0-a715-ea40baaec5c3\") " pod="openstack/dnsmasq-dns-bccf8f775-rzzhl" Dec 03 09:14:59 crc kubenswrapper[4576]: I1203 09:14:59.491026 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7b4f90af-69a8-42f0-a715-ea40baaec5c3-ovsdbserver-nb\") pod \"dnsmasq-dns-bccf8f775-rzzhl\" (UID: \"7b4f90af-69a8-42f0-a715-ea40baaec5c3\") " pod="openstack/dnsmasq-dns-bccf8f775-rzzhl" Dec 03 09:14:59 crc kubenswrapper[4576]: I1203 09:14:59.491809 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7b4f90af-69a8-42f0-a715-ea40baaec5c3-dns-svc\") pod \"dnsmasq-dns-bccf8f775-rzzhl\" (UID: \"7b4f90af-69a8-42f0-a715-ea40baaec5c3\") " pod="openstack/dnsmasq-dns-bccf8f775-rzzhl" Dec 03 09:14:59 crc kubenswrapper[4576]: I1203 09:14:59.492359 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7b4f90af-69a8-42f0-a715-ea40baaec5c3-dns-swift-storage-0\") pod \"dnsmasq-dns-bccf8f775-rzzhl\" (UID: \"7b4f90af-69a8-42f0-a715-ea40baaec5c3\") " pod="openstack/dnsmasq-dns-bccf8f775-rzzhl" Dec 03 09:14:59 crc kubenswrapper[4576]: I1203 09:14:59.492869 4576 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b4f90af-69a8-42f0-a715-ea40baaec5c3-config\") pod \"dnsmasq-dns-bccf8f775-rzzhl\" (UID: \"7b4f90af-69a8-42f0-a715-ea40baaec5c3\") " pod="openstack/dnsmasq-dns-bccf8f775-rzzhl" Dec 03 09:14:59 crc kubenswrapper[4576]: I1203 09:14:59.533263 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwbx8\" (UniqueName: \"kubernetes.io/projected/7b4f90af-69a8-42f0-a715-ea40baaec5c3-kube-api-access-fwbx8\") pod \"dnsmasq-dns-bccf8f775-rzzhl\" (UID: \"7b4f90af-69a8-42f0-a715-ea40baaec5c3\") " pod="openstack/dnsmasq-dns-bccf8f775-rzzhl" Dec 03 09:14:59 crc kubenswrapper[4576]: I1203 09:14:59.554200 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 09:14:59 crc kubenswrapper[4576]: I1203 09:14:59.626420 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bccf8f775-rzzhl" Dec 03 09:14:59 crc kubenswrapper[4576]: I1203 09:14:59.778961 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 03 09:14:59 crc kubenswrapper[4576]: I1203 09:14:59.900315 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-fm89l"] Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.185107 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412555-7sjxl"] Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.207552 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412555-7sjxl" Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.214215 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.214371 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.249645 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412555-7sjxl"] Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.331878 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.355708 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2cda19d3-1f9d-4c20-9a3f-eb3e19542a35-secret-volume\") pod \"collect-profiles-29412555-7sjxl\" (UID: \"2cda19d3-1f9d-4c20-9a3f-eb3e19542a35\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412555-7sjxl" Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.355928 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2cda19d3-1f9d-4c20-9a3f-eb3e19542a35-config-volume\") pod \"collect-profiles-29412555-7sjxl\" (UID: \"2cda19d3-1f9d-4c20-9a3f-eb3e19542a35\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412555-7sjxl" Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.356010 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-6w52m\" (UniqueName: \"kubernetes.io/projected/2cda19d3-1f9d-4c20-9a3f-eb3e19542a35-kube-api-access-6w52m\") pod \"collect-profiles-29412555-7sjxl\" (UID: \"2cda19d3-1f9d-4c20-9a3f-eb3e19542a35\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412555-7sjxl" Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.397836 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.459055 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6w52m\" (UniqueName: \"kubernetes.io/projected/2cda19d3-1f9d-4c20-9a3f-eb3e19542a35-kube-api-access-6w52m\") pod \"collect-profiles-29412555-7sjxl\" (UID: \"2cda19d3-1f9d-4c20-9a3f-eb3e19542a35\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412555-7sjxl" Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.459751 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2cda19d3-1f9d-4c20-9a3f-eb3e19542a35-secret-volume\") pod \"collect-profiles-29412555-7sjxl\" (UID: \"2cda19d3-1f9d-4c20-9a3f-eb3e19542a35\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412555-7sjxl" Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.460956 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2cda19d3-1f9d-4c20-9a3f-eb3e19542a35-config-volume\") pod \"collect-profiles-29412555-7sjxl\" (UID: \"2cda19d3-1f9d-4c20-9a3f-eb3e19542a35\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412555-7sjxl" Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.462359 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2cda19d3-1f9d-4c20-9a3f-eb3e19542a35-config-volume\") pod \"collect-profiles-29412555-7sjxl\" (UID: \"2cda19d3-1f9d-4c20-9a3f-eb3e19542a35\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412555-7sjxl" Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.508884 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2cda19d3-1f9d-4c20-9a3f-eb3e19542a35-secret-volume\") pod \"collect-profiles-29412555-7sjxl\" (UID: \"2cda19d3-1f9d-4c20-9a3f-eb3e19542a35\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412555-7sjxl" Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.511263 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.534640 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6w52m\" (UniqueName: \"kubernetes.io/projected/2cda19d3-1f9d-4c20-9a3f-eb3e19542a35-kube-api-access-6w52m\") pod \"collect-profiles-29412555-7sjxl\" (UID: \"2cda19d3-1f9d-4c20-9a3f-eb3e19542a35\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412555-7sjxl" Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.590865 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.663111 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412555-7sjxl" Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.691648 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-rzzhl"] Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.777617 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-bnv8z"] Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.781312 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-bnv8z" Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.782428 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-fm89l" event={"ID":"66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24","Type":"ContainerStarted","Data":"740357c445c00011968b252024d87485977eeccd7e173d9004c382b862d7c8ed"} Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.782560 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-fm89l" event={"ID":"66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24","Type":"ContainerStarted","Data":"bdcee4ffad261057e1cc1c96bb93061afe74cd8b9c8090d968ef04a284bc9447"} Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.790428 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bccf8f775-rzzhl" event={"ID":"7b4f90af-69a8-42f0-a715-ea40baaec5c3","Type":"ContainerStarted","Data":"ac81b8a934ab1ccdbff8c8f20223f54976989ed1c20c65d573ea06afec696461"} Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.800408 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.800969 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.809242 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-bnv8z"] Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.809576 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"386046da-1085-4aca-bc7d-a9960318c1cb","Type":"ContainerStarted","Data":"8273f6fb07401c3bbdb72ad039ce12aa6bf1c0f2807c67759139273f5472cfec"} Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.829046 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c88ef9a8-712f-430d-859e-3acbf244c2b9-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-bnv8z\" (UID: \"c88ef9a8-712f-430d-859e-3acbf244c2b9\") " pod="openstack/nova-cell1-conductor-db-sync-bnv8z" Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.832126 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0aba50e0-b5ee-461b-837c-137175381621","Type":"ContainerStarted","Data":"c408832e626701bbefa6e9b7b988816ceb8a78e9ef8d0da118978f7cef4a4469"} Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.842143 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c88ef9a8-712f-430d-859e-3acbf244c2b9-config-data\") pod \"nova-cell1-conductor-db-sync-bnv8z\" (UID: \"c88ef9a8-712f-430d-859e-3acbf244c2b9\") " pod="openstack/nova-cell1-conductor-db-sync-bnv8z" Dec 03 09:15:00 crc 
kubenswrapper[4576]: I1203 09:15:00.842182 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-msxdg\" (UniqueName: \"kubernetes.io/projected/c88ef9a8-712f-430d-859e-3acbf244c2b9-kube-api-access-msxdg\") pod \"nova-cell1-conductor-db-sync-bnv8z\" (UID: \"c88ef9a8-712f-430d-859e-3acbf244c2b9\") " pod="openstack/nova-cell1-conductor-db-sync-bnv8z" Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.842348 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c88ef9a8-712f-430d-859e-3acbf244c2b9-scripts\") pod \"nova-cell1-conductor-db-sync-bnv8z\" (UID: \"c88ef9a8-712f-430d-859e-3acbf244c2b9\") " pod="openstack/nova-cell1-conductor-db-sync-bnv8z" Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.856068 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4bd60085-c02a-48a1-92ef-d315753b63d3","Type":"ContainerStarted","Data":"7f6f65122e86721597a3dfb0368aaaccda08fa66b657391ab290dccd7543b31b"} Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.859946 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"8d49b775-1c3c-430a-9d73-744e4186631f","Type":"ContainerStarted","Data":"1a73784ae7f0fc5001ad713467deb054ba6bdda4228df130121336ab35a7204c"} Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.867964 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-fm89l" podStartSLOduration=3.867936341 podStartE2EDuration="3.867936341s" podCreationTimestamp="2025-12-03 09:14:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:15:00.841453918 +0000 UTC m=+2108.227430902" watchObservedRunningTime="2025-12-03 09:15:00.867936341 +0000 UTC m=+2108.253913325" Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.945006 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c88ef9a8-712f-430d-859e-3acbf244c2b9-config-data\") pod \"nova-cell1-conductor-db-sync-bnv8z\" (UID: \"c88ef9a8-712f-430d-859e-3acbf244c2b9\") " pod="openstack/nova-cell1-conductor-db-sync-bnv8z" Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.945280 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-msxdg\" (UniqueName: \"kubernetes.io/projected/c88ef9a8-712f-430d-859e-3acbf244c2b9-kube-api-access-msxdg\") pod \"nova-cell1-conductor-db-sync-bnv8z\" (UID: \"c88ef9a8-712f-430d-859e-3acbf244c2b9\") " pod="openstack/nova-cell1-conductor-db-sync-bnv8z" Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.945447 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c88ef9a8-712f-430d-859e-3acbf244c2b9-scripts\") pod \"nova-cell1-conductor-db-sync-bnv8z\" (UID: \"c88ef9a8-712f-430d-859e-3acbf244c2b9\") " pod="openstack/nova-cell1-conductor-db-sync-bnv8z" Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.945634 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c88ef9a8-712f-430d-859e-3acbf244c2b9-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-bnv8z\" (UID: \"c88ef9a8-712f-430d-859e-3acbf244c2b9\") " 
pod="openstack/nova-cell1-conductor-db-sync-bnv8z" Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.952757 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c88ef9a8-712f-430d-859e-3acbf244c2b9-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-bnv8z\" (UID: \"c88ef9a8-712f-430d-859e-3acbf244c2b9\") " pod="openstack/nova-cell1-conductor-db-sync-bnv8z" Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.956359 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c88ef9a8-712f-430d-859e-3acbf244c2b9-config-data\") pod \"nova-cell1-conductor-db-sync-bnv8z\" (UID: \"c88ef9a8-712f-430d-859e-3acbf244c2b9\") " pod="openstack/nova-cell1-conductor-db-sync-bnv8z" Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.959968 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c88ef9a8-712f-430d-859e-3acbf244c2b9-scripts\") pod \"nova-cell1-conductor-db-sync-bnv8z\" (UID: \"c88ef9a8-712f-430d-859e-3acbf244c2b9\") " pod="openstack/nova-cell1-conductor-db-sync-bnv8z" Dec 03 09:15:00 crc kubenswrapper[4576]: I1203 09:15:00.975915 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-msxdg\" (UniqueName: \"kubernetes.io/projected/c88ef9a8-712f-430d-859e-3acbf244c2b9-kube-api-access-msxdg\") pod \"nova-cell1-conductor-db-sync-bnv8z\" (UID: \"c88ef9a8-712f-430d-859e-3acbf244c2b9\") " pod="openstack/nova-cell1-conductor-db-sync-bnv8z" Dec 03 09:15:01 crc kubenswrapper[4576]: I1203 09:15:01.164493 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-bnv8z" Dec 03 09:15:01 crc kubenswrapper[4576]: I1203 09:15:01.397915 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412555-7sjxl"] Dec 03 09:15:01 crc kubenswrapper[4576]: W1203 09:15:01.417693 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2cda19d3_1f9d_4c20_9a3f_eb3e19542a35.slice/crio-f997e38743f4741cc5cca6bec7e967e4aaef984a207d2d063555d6b78911a418 WatchSource:0}: Error finding container f997e38743f4741cc5cca6bec7e967e4aaef984a207d2d063555d6b78911a418: Status 404 returned error can't find the container with id f997e38743f4741cc5cca6bec7e967e4aaef984a207d2d063555d6b78911a418 Dec 03 09:15:01 crc kubenswrapper[4576]: I1203 09:15:01.647959 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-bnv8z"] Dec 03 09:15:01 crc kubenswrapper[4576]: W1203 09:15:01.669547 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc88ef9a8_712f_430d_859e_3acbf244c2b9.slice/crio-047d08886a4b1caa4915d10782b4ecfdf57102cc209b64e822f7784623081f08 WatchSource:0}: Error finding container 047d08886a4b1caa4915d10782b4ecfdf57102cc209b64e822f7784623081f08: Status 404 returned error can't find the container with id 047d08886a4b1caa4915d10782b4ecfdf57102cc209b64e822f7784623081f08 Dec 03 09:15:01 crc kubenswrapper[4576]: I1203 09:15:01.936570 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412555-7sjxl" 
event={"ID":"2cda19d3-1f9d-4c20-9a3f-eb3e19542a35","Type":"ContainerStarted","Data":"9000014e5e90d665690f3ef7d8feeeaa29e07496ada51baf487e901867c6f830"} Dec 03 09:15:01 crc kubenswrapper[4576]: I1203 09:15:01.936635 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412555-7sjxl" event={"ID":"2cda19d3-1f9d-4c20-9a3f-eb3e19542a35","Type":"ContainerStarted","Data":"f997e38743f4741cc5cca6bec7e967e4aaef984a207d2d063555d6b78911a418"} Dec 03 09:15:01 crc kubenswrapper[4576]: I1203 09:15:01.941804 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-bnv8z" event={"ID":"c88ef9a8-712f-430d-859e-3acbf244c2b9","Type":"ContainerStarted","Data":"047d08886a4b1caa4915d10782b4ecfdf57102cc209b64e822f7784623081f08"} Dec 03 09:15:01 crc kubenswrapper[4576]: I1203 09:15:01.944294 4576 generic.go:334] "Generic (PLEG): container finished" podID="7b4f90af-69a8-42f0-a715-ea40baaec5c3" containerID="7a614826ad27b888bf2098152d98bd3bdfc1f1d2f1f80a0cfb81fcf4b1d7cfdd" exitCode=0 Dec 03 09:15:01 crc kubenswrapper[4576]: I1203 09:15:01.944418 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bccf8f775-rzzhl" event={"ID":"7b4f90af-69a8-42f0-a715-ea40baaec5c3","Type":"ContainerDied","Data":"7a614826ad27b888bf2098152d98bd3bdfc1f1d2f1f80a0cfb81fcf4b1d7cfdd"} Dec 03 09:15:01 crc kubenswrapper[4576]: I1203 09:15:01.951859 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29412555-7sjxl" podStartSLOduration=1.951836632 podStartE2EDuration="1.951836632s" podCreationTimestamp="2025-12-03 09:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:15:01.94988995 +0000 UTC m=+2109.335866924" watchObservedRunningTime="2025-12-03 09:15:01.951836632 +0000 UTC m=+2109.337813626" Dec 03 09:15:02 crc kubenswrapper[4576]: I1203 09:15:02.954910 4576 generic.go:334] "Generic (PLEG): container finished" podID="2cda19d3-1f9d-4c20-9a3f-eb3e19542a35" containerID="9000014e5e90d665690f3ef7d8feeeaa29e07496ada51baf487e901867c6f830" exitCode=0 Dec 03 09:15:02 crc kubenswrapper[4576]: I1203 09:15:02.954957 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412555-7sjxl" event={"ID":"2cda19d3-1f9d-4c20-9a3f-eb3e19542a35","Type":"ContainerDied","Data":"9000014e5e90d665690f3ef7d8feeeaa29e07496ada51baf487e901867c6f830"} Dec 03 09:15:02 crc kubenswrapper[4576]: I1203 09:15:02.957683 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-bnv8z" event={"ID":"c88ef9a8-712f-430d-859e-3acbf244c2b9","Type":"ContainerStarted","Data":"038ef87ea9073e8d8641b0f5a757e272141bd0fda3ef2096348e3e29fd319af6"} Dec 03 09:15:02 crc kubenswrapper[4576]: I1203 09:15:02.961421 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bccf8f775-rzzhl" event={"ID":"7b4f90af-69a8-42f0-a715-ea40baaec5c3","Type":"ContainerStarted","Data":"bf428d02dad0cbf5a0d575b860850cf79216c7c771ea996a1bdc4149fdd4eb2d"} Dec 03 09:15:02 crc kubenswrapper[4576]: I1203 09:15:02.962161 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-bccf8f775-rzzhl" Dec 03 09:15:03 crc kubenswrapper[4576]: I1203 09:15:03.001120 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/nova-cell1-conductor-db-sync-bnv8z" podStartSLOduration=3.00110201 podStartE2EDuration="3.00110201s" podCreationTimestamp="2025-12-03 09:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:15:02.984884787 +0000 UTC m=+2110.370861771" watchObservedRunningTime="2025-12-03 09:15:03.00110201 +0000 UTC m=+2110.387078994" Dec 03 09:15:03 crc kubenswrapper[4576]: I1203 09:15:03.018337 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-bccf8f775-rzzhl" podStartSLOduration=5.018313409 podStartE2EDuration="5.018313409s" podCreationTimestamp="2025-12-03 09:14:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:15:03.006551518 +0000 UTC m=+2110.392528502" watchObservedRunningTime="2025-12-03 09:15:03.018313409 +0000 UTC m=+2110.404290393" Dec 03 09:15:03 crc kubenswrapper[4576]: I1203 09:15:03.507754 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 09:15:03 crc kubenswrapper[4576]: I1203 09:15:03.553807 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 09:15:05 crc kubenswrapper[4576]: I1203 09:15:05.780334 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412555-7sjxl" Dec 03 09:15:05 crc kubenswrapper[4576]: I1203 09:15:05.885066 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2cda19d3-1f9d-4c20-9a3f-eb3e19542a35-secret-volume\") pod \"2cda19d3-1f9d-4c20-9a3f-eb3e19542a35\" (UID: \"2cda19d3-1f9d-4c20-9a3f-eb3e19542a35\") " Dec 03 09:15:05 crc kubenswrapper[4576]: I1203 09:15:05.885835 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2cda19d3-1f9d-4c20-9a3f-eb3e19542a35-config-volume\") pod \"2cda19d3-1f9d-4c20-9a3f-eb3e19542a35\" (UID: \"2cda19d3-1f9d-4c20-9a3f-eb3e19542a35\") " Dec 03 09:15:05 crc kubenswrapper[4576]: I1203 09:15:05.886147 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6w52m\" (UniqueName: \"kubernetes.io/projected/2cda19d3-1f9d-4c20-9a3f-eb3e19542a35-kube-api-access-6w52m\") pod \"2cda19d3-1f9d-4c20-9a3f-eb3e19542a35\" (UID: \"2cda19d3-1f9d-4c20-9a3f-eb3e19542a35\") " Dec 03 09:15:05 crc kubenswrapper[4576]: I1203 09:15:05.886537 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2cda19d3-1f9d-4c20-9a3f-eb3e19542a35-config-volume" (OuterVolumeSpecName: "config-volume") pod "2cda19d3-1f9d-4c20-9a3f-eb3e19542a35" (UID: "2cda19d3-1f9d-4c20-9a3f-eb3e19542a35"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:15:05 crc kubenswrapper[4576]: I1203 09:15:05.886836 4576 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2cda19d3-1f9d-4c20-9a3f-eb3e19542a35-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:05 crc kubenswrapper[4576]: I1203 09:15:05.889409 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2cda19d3-1f9d-4c20-9a3f-eb3e19542a35-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "2cda19d3-1f9d-4c20-9a3f-eb3e19542a35" (UID: "2cda19d3-1f9d-4c20-9a3f-eb3e19542a35"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:15:05 crc kubenswrapper[4576]: I1203 09:15:05.891380 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2cda19d3-1f9d-4c20-9a3f-eb3e19542a35-kube-api-access-6w52m" (OuterVolumeSpecName: "kube-api-access-6w52m") pod "2cda19d3-1f9d-4c20-9a3f-eb3e19542a35" (UID: "2cda19d3-1f9d-4c20-9a3f-eb3e19542a35"). InnerVolumeSpecName "kube-api-access-6w52m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:15:05 crc kubenswrapper[4576]: I1203 09:15:05.989066 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6w52m\" (UniqueName: \"kubernetes.io/projected/2cda19d3-1f9d-4c20-9a3f-eb3e19542a35-kube-api-access-6w52m\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:05 crc kubenswrapper[4576]: I1203 09:15:05.989378 4576 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2cda19d3-1f9d-4c20-9a3f-eb3e19542a35-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:06 crc kubenswrapper[4576]: I1203 09:15:06.001617 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412555-7sjxl" event={"ID":"2cda19d3-1f9d-4c20-9a3f-eb3e19542a35","Type":"ContainerDied","Data":"f997e38743f4741cc5cca6bec7e967e4aaef984a207d2d063555d6b78911a418"} Dec 03 09:15:06 crc kubenswrapper[4576]: I1203 09:15:06.001667 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f997e38743f4741cc5cca6bec7e967e4aaef984a207d2d063555d6b78911a418" Dec 03 09:15:06 crc kubenswrapper[4576]: I1203 09:15:06.001736 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412555-7sjxl" Dec 03 09:15:06 crc kubenswrapper[4576]: I1203 09:15:06.866724 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412510-jwjq6"] Dec 03 09:15:06 crc kubenswrapper[4576]: I1203 09:15:06.886915 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412510-jwjq6"] Dec 03 09:15:07 crc kubenswrapper[4576]: I1203 09:15:07.017369 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0aba50e0-b5ee-461b-837c-137175381621","Type":"ContainerStarted","Data":"3d85fe1b393ea56a48fdffa15c038b96bdc31cc18390b3c7bfe3332f1ed067b1"} Dec 03 09:15:07 crc kubenswrapper[4576]: I1203 09:15:07.017433 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0aba50e0-b5ee-461b-837c-137175381621","Type":"ContainerStarted","Data":"d9d514e0ad889b431d0d22bb4adee4349d16189b109df2c897414f9a4bf26b47"} Dec 03 09:15:07 crc kubenswrapper[4576]: I1203 09:15:07.021464 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4bd60085-c02a-48a1-92ef-d315753b63d3","Type":"ContainerStarted","Data":"e76644a51c9ef5742793711a53de83b7e167f9c414c3906628e774ee5a21afd1"} Dec 03 09:15:07 crc kubenswrapper[4576]: I1203 09:15:07.021509 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4bd60085-c02a-48a1-92ef-d315753b63d3","Type":"ContainerStarted","Data":"5cc45d05d5397aa64676ea07971bc794a5f77fa8e01d07acc4e8e90127abfc3b"} Dec 03 09:15:07 crc kubenswrapper[4576]: I1203 09:15:07.021546 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="4bd60085-c02a-48a1-92ef-d315753b63d3" containerName="nova-metadata-log" containerID="cri-o://5cc45d05d5397aa64676ea07971bc794a5f77fa8e01d07acc4e8e90127abfc3b" gracePeriod=30 Dec 03 09:15:07 crc kubenswrapper[4576]: I1203 09:15:07.021556 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="4bd60085-c02a-48a1-92ef-d315753b63d3" containerName="nova-metadata-metadata" containerID="cri-o://e76644a51c9ef5742793711a53de83b7e167f9c414c3906628e774ee5a21afd1" gracePeriod=30 Dec 03 09:15:07 crc kubenswrapper[4576]: I1203 09:15:07.026322 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"8d49b775-1c3c-430a-9d73-744e4186631f","Type":"ContainerStarted","Data":"a37ad400beaa2fbabddb7cb25f0c549e11a78967e5d91b73fdf808c94b2a1149"} Dec 03 09:15:07 crc kubenswrapper[4576]: I1203 09:15:07.026508 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="8d49b775-1c3c-430a-9d73-744e4186631f" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://a37ad400beaa2fbabddb7cb25f0c549e11a78967e5d91b73fdf808c94b2a1149" gracePeriod=30 Dec 03 09:15:07 crc kubenswrapper[4576]: I1203 09:15:07.031804 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"386046da-1085-4aca-bc7d-a9960318c1cb","Type":"ContainerStarted","Data":"6ddf33c0754e54668f7bc428a6e1908109b8791fb015b0f348daa101cfa41064"} Dec 03 09:15:07 crc kubenswrapper[4576]: I1203 09:15:07.035546 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" 
podStartSLOduration=4.001901699 podStartE2EDuration="9.035516227s" podCreationTimestamp="2025-12-03 09:14:58 +0000 UTC" firstStartedPulling="2025-12-03 09:15:00.586585962 +0000 UTC m=+2107.972562946" lastFinishedPulling="2025-12-03 09:15:05.6202005 +0000 UTC m=+2113.006177474" observedRunningTime="2025-12-03 09:15:07.034421917 +0000 UTC m=+2114.420398901" watchObservedRunningTime="2025-12-03 09:15:07.035516227 +0000 UTC m=+2114.421493221" Dec 03 09:15:07 crc kubenswrapper[4576]: I1203 09:15:07.056018 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=3.99318235 podStartE2EDuration="9.056000876s" podCreationTimestamp="2025-12-03 09:14:58 +0000 UTC" firstStartedPulling="2025-12-03 09:15:00.559183574 +0000 UTC m=+2107.945160558" lastFinishedPulling="2025-12-03 09:15:05.6220021 +0000 UTC m=+2113.007979084" observedRunningTime="2025-12-03 09:15:07.049864338 +0000 UTC m=+2114.435841322" watchObservedRunningTime="2025-12-03 09:15:07.056000876 +0000 UTC m=+2114.441977860" Dec 03 09:15:07 crc kubenswrapper[4576]: I1203 09:15:07.087478 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.77624459 podStartE2EDuration="9.087459355s" podCreationTimestamp="2025-12-03 09:14:58 +0000 UTC" firstStartedPulling="2025-12-03 09:15:00.310766904 +0000 UTC m=+2107.696743888" lastFinishedPulling="2025-12-03 09:15:05.621981669 +0000 UTC m=+2113.007958653" observedRunningTime="2025-12-03 09:15:07.06673569 +0000 UTC m=+2114.452712674" watchObservedRunningTime="2025-12-03 09:15:07.087459355 +0000 UTC m=+2114.473436339" Dec 03 09:15:07 crc kubenswrapper[4576]: I1203 09:15:07.092550 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.876667492 podStartE2EDuration="9.092540644s" podCreationTimestamp="2025-12-03 09:14:58 +0000 UTC" firstStartedPulling="2025-12-03 09:15:00.409786077 +0000 UTC m=+2107.795763061" lastFinishedPulling="2025-12-03 09:15:05.625659229 +0000 UTC m=+2113.011636213" observedRunningTime="2025-12-03 09:15:07.088438322 +0000 UTC m=+2114.474415306" watchObservedRunningTime="2025-12-03 09:15:07.092540644 +0000 UTC m=+2114.478517628" Dec 03 09:15:07 crc kubenswrapper[4576]: I1203 09:15:07.463794 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 09:15:07 crc kubenswrapper[4576]: I1203 09:15:07.464468 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="4dce317b-36ba-47d9-9175-a50ed2bf038b" containerName="kube-state-metrics" containerID="cri-o://c7dda649a634af571e69f7c7a1613f11b15679886c666c57073df471b595e306" gracePeriod=30 Dec 03 09:15:07 crc kubenswrapper[4576]: I1203 09:15:07.693702 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="70305d8f-8bdf-437b-ba4d-aaadc408205d" path="/var/lib/kubelet/pods/70305d8f-8bdf-437b-ba4d-aaadc408205d/volumes" Dec 03 09:15:08 crc kubenswrapper[4576]: I1203 09:15:08.011006 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 09:15:08 crc kubenswrapper[4576]: I1203 09:15:08.065144 4576 generic.go:334] "Generic (PLEG): container finished" podID="4bd60085-c02a-48a1-92ef-d315753b63d3" containerID="e76644a51c9ef5742793711a53de83b7e167f9c414c3906628e774ee5a21afd1" exitCode=0 Dec 03 09:15:08 crc kubenswrapper[4576]: I1203 09:15:08.065179 4576 generic.go:334] "Generic (PLEG): container finished" podID="4bd60085-c02a-48a1-92ef-d315753b63d3" containerID="5cc45d05d5397aa64676ea07971bc794a5f77fa8e01d07acc4e8e90127abfc3b" exitCode=143 Dec 03 09:15:08 crc kubenswrapper[4576]: I1203 09:15:08.065248 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4bd60085-c02a-48a1-92ef-d315753b63d3","Type":"ContainerDied","Data":"e76644a51c9ef5742793711a53de83b7e167f9c414c3906628e774ee5a21afd1"} Dec 03 09:15:08 crc kubenswrapper[4576]: I1203 09:15:08.065275 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4bd60085-c02a-48a1-92ef-d315753b63d3","Type":"ContainerDied","Data":"5cc45d05d5397aa64676ea07971bc794a5f77fa8e01d07acc4e8e90127abfc3b"} Dec 03 09:15:08 crc kubenswrapper[4576]: I1203 09:15:08.065287 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4bd60085-c02a-48a1-92ef-d315753b63d3","Type":"ContainerDied","Data":"7f6f65122e86721597a3dfb0368aaaccda08fa66b657391ab290dccd7543b31b"} Dec 03 09:15:08 crc kubenswrapper[4576]: I1203 09:15:08.065302 4576 scope.go:117] "RemoveContainer" containerID="e76644a51c9ef5742793711a53de83b7e167f9c414c3906628e774ee5a21afd1" Dec 03 09:15:08 crc kubenswrapper[4576]: I1203 09:15:08.065472 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 09:15:08 crc kubenswrapper[4576]: I1203 09:15:08.073398 4576 generic.go:334] "Generic (PLEG): container finished" podID="4dce317b-36ba-47d9-9175-a50ed2bf038b" containerID="c7dda649a634af571e69f7c7a1613f11b15679886c666c57073df471b595e306" exitCode=2 Dec 03 09:15:08 crc kubenswrapper[4576]: I1203 09:15:08.074552 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"4dce317b-36ba-47d9-9175-a50ed2bf038b","Type":"ContainerDied","Data":"c7dda649a634af571e69f7c7a1613f11b15679886c666c57073df471b595e306"} Dec 03 09:15:08 crc kubenswrapper[4576]: I1203 09:15:08.159196 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bd60085-c02a-48a1-92ef-d315753b63d3-combined-ca-bundle\") pod \"4bd60085-c02a-48a1-92ef-d315753b63d3\" (UID: \"4bd60085-c02a-48a1-92ef-d315753b63d3\") " Dec 03 09:15:08 crc kubenswrapper[4576]: I1203 09:15:08.159790 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4bd60085-c02a-48a1-92ef-d315753b63d3-logs\") pod \"4bd60085-c02a-48a1-92ef-d315753b63d3\" (UID: \"4bd60085-c02a-48a1-92ef-d315753b63d3\") " Dec 03 09:15:08 crc kubenswrapper[4576]: I1203 09:15:08.159843 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h7qnb\" (UniqueName: \"kubernetes.io/projected/4bd60085-c02a-48a1-92ef-d315753b63d3-kube-api-access-h7qnb\") pod \"4bd60085-c02a-48a1-92ef-d315753b63d3\" (UID: \"4bd60085-c02a-48a1-92ef-d315753b63d3\") " Dec 03 09:15:08 crc kubenswrapper[4576]: I1203 09:15:08.159891 4576 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4bd60085-c02a-48a1-92ef-d315753b63d3-config-data\") pod \"4bd60085-c02a-48a1-92ef-d315753b63d3\" (UID: \"4bd60085-c02a-48a1-92ef-d315753b63d3\") " Dec 03 09:15:08 crc kubenswrapper[4576]: I1203 09:15:08.160682 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4bd60085-c02a-48a1-92ef-d315753b63d3-logs" (OuterVolumeSpecName: "logs") pod "4bd60085-c02a-48a1-92ef-d315753b63d3" (UID: "4bd60085-c02a-48a1-92ef-d315753b63d3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:15:08 crc kubenswrapper[4576]: I1203 09:15:08.165872 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bd60085-c02a-48a1-92ef-d315753b63d3-kube-api-access-h7qnb" (OuterVolumeSpecName: "kube-api-access-h7qnb") pod "4bd60085-c02a-48a1-92ef-d315753b63d3" (UID: "4bd60085-c02a-48a1-92ef-d315753b63d3"). InnerVolumeSpecName "kube-api-access-h7qnb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:15:08 crc kubenswrapper[4576]: I1203 09:15:08.202263 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 03 09:15:08 crc kubenswrapper[4576]: I1203 09:15:08.208355 4576 scope.go:117] "RemoveContainer" containerID="5cc45d05d5397aa64676ea07971bc794a5f77fa8e01d07acc4e8e90127abfc3b" Dec 03 09:15:08 crc kubenswrapper[4576]: E1203 09:15:08.208551 4576 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4bd60085-c02a-48a1-92ef-d315753b63d3-config-data podName:4bd60085-c02a-48a1-92ef-d315753b63d3 nodeName:}" failed. No retries permitted until 2025-12-03 09:15:08.70849934 +0000 UTC m=+2116.094476324 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "config-data" (UniqueName: "kubernetes.io/secret/4bd60085-c02a-48a1-92ef-d315753b63d3-config-data") pod "4bd60085-c02a-48a1-92ef-d315753b63d3" (UID: "4bd60085-c02a-48a1-92ef-d315753b63d3") : error deleting /var/lib/kubelet/pods/4bd60085-c02a-48a1-92ef-d315753b63d3/volume-subpaths: remove /var/lib/kubelet/pods/4bd60085-c02a-48a1-92ef-d315753b63d3/volume-subpaths: no such file or directory Dec 03 09:15:08 crc kubenswrapper[4576]: I1203 09:15:08.213932 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4bd60085-c02a-48a1-92ef-d315753b63d3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4bd60085-c02a-48a1-92ef-d315753b63d3" (UID: "4bd60085-c02a-48a1-92ef-d315753b63d3"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:15:08 crc kubenswrapper[4576]: I1203 09:15:08.235834 4576 scope.go:117] "RemoveContainer" containerID="e76644a51c9ef5742793711a53de83b7e167f9c414c3906628e774ee5a21afd1" Dec 03 09:15:08 crc kubenswrapper[4576]: E1203 09:15:08.239800 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e76644a51c9ef5742793711a53de83b7e167f9c414c3906628e774ee5a21afd1\": container with ID starting with e76644a51c9ef5742793711a53de83b7e167f9c414c3906628e774ee5a21afd1 not found: ID does not exist" containerID="e76644a51c9ef5742793711a53de83b7e167f9c414c3906628e774ee5a21afd1" Dec 03 09:15:08 crc kubenswrapper[4576]: I1203 09:15:08.239846 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e76644a51c9ef5742793711a53de83b7e167f9c414c3906628e774ee5a21afd1"} err="failed to get container status \"e76644a51c9ef5742793711a53de83b7e167f9c414c3906628e774ee5a21afd1\": rpc error: code = NotFound desc = could not find container \"e76644a51c9ef5742793711a53de83b7e167f9c414c3906628e774ee5a21afd1\": container with ID starting with e76644a51c9ef5742793711a53de83b7e167f9c414c3906628e774ee5a21afd1 not found: ID does not exist" Dec 03 09:15:08 crc kubenswrapper[4576]: I1203 09:15:08.239876 4576 scope.go:117] "RemoveContainer" containerID="5cc45d05d5397aa64676ea07971bc794a5f77fa8e01d07acc4e8e90127abfc3b" Dec 03 09:15:08 crc kubenswrapper[4576]: E1203 09:15:08.240629 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5cc45d05d5397aa64676ea07971bc794a5f77fa8e01d07acc4e8e90127abfc3b\": container with ID starting with 5cc45d05d5397aa64676ea07971bc794a5f77fa8e01d07acc4e8e90127abfc3b not found: ID does not exist" containerID="5cc45d05d5397aa64676ea07971bc794a5f77fa8e01d07acc4e8e90127abfc3b" Dec 03 09:15:08 crc kubenswrapper[4576]: I1203 09:15:08.240658 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5cc45d05d5397aa64676ea07971bc794a5f77fa8e01d07acc4e8e90127abfc3b"} err="failed to get container status \"5cc45d05d5397aa64676ea07971bc794a5f77fa8e01d07acc4e8e90127abfc3b\": rpc error: code = NotFound desc = could not find container \"5cc45d05d5397aa64676ea07971bc794a5f77fa8e01d07acc4e8e90127abfc3b\": container with ID starting with 5cc45d05d5397aa64676ea07971bc794a5f77fa8e01d07acc4e8e90127abfc3b not found: ID does not exist" Dec 03 09:15:08 crc kubenswrapper[4576]: I1203 09:15:08.240672 4576 scope.go:117] "RemoveContainer" containerID="e76644a51c9ef5742793711a53de83b7e167f9c414c3906628e774ee5a21afd1" Dec 03 09:15:08 crc kubenswrapper[4576]: I1203 09:15:08.244351 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e76644a51c9ef5742793711a53de83b7e167f9c414c3906628e774ee5a21afd1"} err="failed to get container status \"e76644a51c9ef5742793711a53de83b7e167f9c414c3906628e774ee5a21afd1\": rpc error: code = NotFound desc = could not find container \"e76644a51c9ef5742793711a53de83b7e167f9c414c3906628e774ee5a21afd1\": container with ID starting with e76644a51c9ef5742793711a53de83b7e167f9c414c3906628e774ee5a21afd1 not found: ID does not exist" Dec 03 09:15:08 crc kubenswrapper[4576]: I1203 09:15:08.244383 4576 scope.go:117] "RemoveContainer" containerID="5cc45d05d5397aa64676ea07971bc794a5f77fa8e01d07acc4e8e90127abfc3b" Dec 03 09:15:08 crc kubenswrapper[4576]: I1203 09:15:08.244830 4576 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5cc45d05d5397aa64676ea07971bc794a5f77fa8e01d07acc4e8e90127abfc3b"} err="failed to get container status \"5cc45d05d5397aa64676ea07971bc794a5f77fa8e01d07acc4e8e90127abfc3b\": rpc error: code = NotFound desc = could not find container \"5cc45d05d5397aa64676ea07971bc794a5f77fa8e01d07acc4e8e90127abfc3b\": container with ID starting with 5cc45d05d5397aa64676ea07971bc794a5f77fa8e01d07acc4e8e90127abfc3b not found: ID does not exist" Dec 03 09:15:08 crc kubenswrapper[4576]: I1203 09:15:08.262417 4576 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bd60085-c02a-48a1-92ef-d315753b63d3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:08 crc kubenswrapper[4576]: I1203 09:15:08.262453 4576 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4bd60085-c02a-48a1-92ef-d315753b63d3-logs\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:08 crc kubenswrapper[4576]: I1203 09:15:08.262465 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h7qnb\" (UniqueName: \"kubernetes.io/projected/4bd60085-c02a-48a1-92ef-d315753b63d3-kube-api-access-h7qnb\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:08 crc kubenswrapper[4576]: I1203 09:15:08.363586 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v952r\" (UniqueName: \"kubernetes.io/projected/4dce317b-36ba-47d9-9175-a50ed2bf038b-kube-api-access-v952r\") pod \"4dce317b-36ba-47d9-9175-a50ed2bf038b\" (UID: \"4dce317b-36ba-47d9-9175-a50ed2bf038b\") " Dec 03 09:15:08 crc kubenswrapper[4576]: I1203 09:15:08.366900 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4dce317b-36ba-47d9-9175-a50ed2bf038b-kube-api-access-v952r" (OuterVolumeSpecName: "kube-api-access-v952r") pod "4dce317b-36ba-47d9-9175-a50ed2bf038b" (UID: "4dce317b-36ba-47d9-9175-a50ed2bf038b"). InnerVolumeSpecName "kube-api-access-v952r". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:15:08 crc kubenswrapper[4576]: I1203 09:15:08.466067 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v952r\" (UniqueName: \"kubernetes.io/projected/4dce317b-36ba-47d9-9175-a50ed2bf038b-kube-api-access-v952r\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:08 crc kubenswrapper[4576]: I1203 09:15:08.725301 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 03 09:15:08 crc kubenswrapper[4576]: I1203 09:15:08.725375 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 03 09:15:08 crc kubenswrapper[4576]: I1203 09:15:08.757067 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 03 09:15:08 crc kubenswrapper[4576]: I1203 09:15:08.770341 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4bd60085-c02a-48a1-92ef-d315753b63d3-config-data\") pod \"4bd60085-c02a-48a1-92ef-d315753b63d3\" (UID: \"4bd60085-c02a-48a1-92ef-d315753b63d3\") " Dec 03 09:15:08 crc kubenswrapper[4576]: I1203 09:15:08.791008 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4bd60085-c02a-48a1-92ef-d315753b63d3-config-data" (OuterVolumeSpecName: "config-data") pod "4bd60085-c02a-48a1-92ef-d315753b63d3" (UID: "4bd60085-c02a-48a1-92ef-d315753b63d3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:15:08 crc kubenswrapper[4576]: I1203 09:15:08.798623 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 03 09:15:08 crc kubenswrapper[4576]: I1203 09:15:08.872723 4576 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4bd60085-c02a-48a1-92ef-d315753b63d3-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:08 crc kubenswrapper[4576]: I1203 09:15:08.999429 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.006401 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.028797 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 03 09:15:09 crc kubenswrapper[4576]: E1203 09:15:09.029252 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dce317b-36ba-47d9-9175-a50ed2bf038b" containerName="kube-state-metrics" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.029267 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dce317b-36ba-47d9-9175-a50ed2bf038b" containerName="kube-state-metrics" Dec 03 09:15:09 crc kubenswrapper[4576]: E1203 09:15:09.029284 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bd60085-c02a-48a1-92ef-d315753b63d3" containerName="nova-metadata-metadata" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.029292 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bd60085-c02a-48a1-92ef-d315753b63d3" containerName="nova-metadata-metadata" Dec 03 09:15:09 crc kubenswrapper[4576]: E1203 09:15:09.029323 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2cda19d3-1f9d-4c20-9a3f-eb3e19542a35" containerName="collect-profiles" Dec 03 09:15:09 crc 
kubenswrapper[4576]: I1203 09:15:09.029333 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="2cda19d3-1f9d-4c20-9a3f-eb3e19542a35" containerName="collect-profiles" Dec 03 09:15:09 crc kubenswrapper[4576]: E1203 09:15:09.029347 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bd60085-c02a-48a1-92ef-d315753b63d3" containerName="nova-metadata-log" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.029356 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bd60085-c02a-48a1-92ef-d315753b63d3" containerName="nova-metadata-log" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.029624 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="4dce317b-36ba-47d9-9175-a50ed2bf038b" containerName="kube-state-metrics" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.029639 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="4bd60085-c02a-48a1-92ef-d315753b63d3" containerName="nova-metadata-log" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.029671 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="4bd60085-c02a-48a1-92ef-d315753b63d3" containerName="nova-metadata-metadata" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.029692 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="2cda19d3-1f9d-4c20-9a3f-eb3e19542a35" containerName="collect-profiles" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.030877 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.035939 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.036062 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.038025 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.080968 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19dc929b-450f-4086-9577-c36504564326-config-data\") pod \"nova-metadata-0\" (UID: \"19dc929b-450f-4086-9577-c36504564326\") " pod="openstack/nova-metadata-0" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.081017 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-76f2q\" (UniqueName: \"kubernetes.io/projected/19dc929b-450f-4086-9577-c36504564326-kube-api-access-76f2q\") pod \"nova-metadata-0\" (UID: \"19dc929b-450f-4086-9577-c36504564326\") " pod="openstack/nova-metadata-0" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.081034 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/19dc929b-450f-4086-9577-c36504564326-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"19dc929b-450f-4086-9577-c36504564326\") " pod="openstack/nova-metadata-0" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.081142 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/19dc929b-450f-4086-9577-c36504564326-logs\") pod \"nova-metadata-0\" (UID: 
\"19dc929b-450f-4086-9577-c36504564326\") " pod="openstack/nova-metadata-0" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.081164 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19dc929b-450f-4086-9577-c36504564326-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"19dc929b-450f-4086-9577-c36504564326\") " pod="openstack/nova-metadata-0" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.109434 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.110219 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"4dce317b-36ba-47d9-9175-a50ed2bf038b","Type":"ContainerDied","Data":"64fde3d445c0c1634611eec4bae7f48eace841a8052e812d5c0049dcbe70c3cf"} Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.110278 4576 scope.go:117] "RemoveContainer" containerID="c7dda649a634af571e69f7c7a1613f11b15679886c666c57073df471b595e306" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.155794 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.169241 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.182774 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/19dc929b-450f-4086-9577-c36504564326-logs\") pod \"nova-metadata-0\" (UID: \"19dc929b-450f-4086-9577-c36504564326\") " pod="openstack/nova-metadata-0" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.182838 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19dc929b-450f-4086-9577-c36504564326-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"19dc929b-450f-4086-9577-c36504564326\") " pod="openstack/nova-metadata-0" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.182887 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19dc929b-450f-4086-9577-c36504564326-config-data\") pod \"nova-metadata-0\" (UID: \"19dc929b-450f-4086-9577-c36504564326\") " pod="openstack/nova-metadata-0" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.182921 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-76f2q\" (UniqueName: \"kubernetes.io/projected/19dc929b-450f-4086-9577-c36504564326-kube-api-access-76f2q\") pod \"nova-metadata-0\" (UID: \"19dc929b-450f-4086-9577-c36504564326\") " pod="openstack/nova-metadata-0" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.182941 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/19dc929b-450f-4086-9577-c36504564326-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"19dc929b-450f-4086-9577-c36504564326\") " pod="openstack/nova-metadata-0" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.184076 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/19dc929b-450f-4086-9577-c36504564326-logs\") pod \"nova-metadata-0\" (UID: 
\"19dc929b-450f-4086-9577-c36504564326\") " pod="openstack/nova-metadata-0" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.190139 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19dc929b-450f-4086-9577-c36504564326-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"19dc929b-450f-4086-9577-c36504564326\") " pod="openstack/nova-metadata-0" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.190575 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.202622 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.203905 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.208976 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-76f2q\" (UniqueName: \"kubernetes.io/projected/19dc929b-450f-4086-9577-c36504564326-kube-api-access-76f2q\") pod \"nova-metadata-0\" (UID: \"19dc929b-450f-4086-9577-c36504564326\") " pod="openstack/nova-metadata-0" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.209131 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/19dc929b-450f-4086-9577-c36504564326-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"19dc929b-450f-4086-9577-c36504564326\") " pod="openstack/nova-metadata-0" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.209248 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.209248 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.212798 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19dc929b-450f-4086-9577-c36504564326-config-data\") pod \"nova-metadata-0\" (UID: \"19dc929b-450f-4086-9577-c36504564326\") " pod="openstack/nova-metadata-0" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.215602 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.284775 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gb7wp\" (UniqueName: \"kubernetes.io/projected/6b92a9ba-d2ef-48e7-8efe-a8cc1c31b6ba-kube-api-access-gb7wp\") pod \"kube-state-metrics-0\" (UID: \"6b92a9ba-d2ef-48e7-8efe-a8cc1c31b6ba\") " pod="openstack/kube-state-metrics-0" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.284844 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b92a9ba-d2ef-48e7-8efe-a8cc1c31b6ba-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"6b92a9ba-d2ef-48e7-8efe-a8cc1c31b6ba\") " pod="openstack/kube-state-metrics-0" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.284912 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: 
\"kubernetes.io/secret/6b92a9ba-d2ef-48e7-8efe-a8cc1c31b6ba-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"6b92a9ba-d2ef-48e7-8efe-a8cc1c31b6ba\") " pod="openstack/kube-state-metrics-0" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.285027 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b92a9ba-d2ef-48e7-8efe-a8cc1c31b6ba-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"6b92a9ba-d2ef-48e7-8efe-a8cc1c31b6ba\") " pod="openstack/kube-state-metrics-0" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.351216 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.387764 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/6b92a9ba-d2ef-48e7-8efe-a8cc1c31b6ba-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"6b92a9ba-d2ef-48e7-8efe-a8cc1c31b6ba\") " pod="openstack/kube-state-metrics-0" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.387918 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b92a9ba-d2ef-48e7-8efe-a8cc1c31b6ba-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"6b92a9ba-d2ef-48e7-8efe-a8cc1c31b6ba\") " pod="openstack/kube-state-metrics-0" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.388011 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gb7wp\" (UniqueName: \"kubernetes.io/projected/6b92a9ba-d2ef-48e7-8efe-a8cc1c31b6ba-kube-api-access-gb7wp\") pod \"kube-state-metrics-0\" (UID: \"6b92a9ba-d2ef-48e7-8efe-a8cc1c31b6ba\") " pod="openstack/kube-state-metrics-0" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.388059 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b92a9ba-d2ef-48e7-8efe-a8cc1c31b6ba-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"6b92a9ba-d2ef-48e7-8efe-a8cc1c31b6ba\") " pod="openstack/kube-state-metrics-0" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.397707 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b92a9ba-d2ef-48e7-8efe-a8cc1c31b6ba-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"6b92a9ba-d2ef-48e7-8efe-a8cc1c31b6ba\") " pod="openstack/kube-state-metrics-0" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.398012 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b92a9ba-d2ef-48e7-8efe-a8cc1c31b6ba-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"6b92a9ba-d2ef-48e7-8efe-a8cc1c31b6ba\") " pod="openstack/kube-state-metrics-0" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.401612 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/6b92a9ba-d2ef-48e7-8efe-a8cc1c31b6ba-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"6b92a9ba-d2ef-48e7-8efe-a8cc1c31b6ba\") " pod="openstack/kube-state-metrics-0" Dec 03 09:15:09 crc kubenswrapper[4576]: 
I1203 09:15:09.416098 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gb7wp\" (UniqueName: \"kubernetes.io/projected/6b92a9ba-d2ef-48e7-8efe-a8cc1c31b6ba-kube-api-access-gb7wp\") pod \"kube-state-metrics-0\" (UID: \"6b92a9ba-d2ef-48e7-8efe-a8cc1c31b6ba\") " pod="openstack/kube-state-metrics-0" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.557228 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.558479 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.606662 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.628682 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-bccf8f775-rzzhl" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.703402 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bd60085-c02a-48a1-92ef-d315753b63d3" path="/var/lib/kubelet/pods/4bd60085-c02a-48a1-92ef-d315753b63d3/volumes" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.704603 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4dce317b-36ba-47d9-9175-a50ed2bf038b" path="/var/lib/kubelet/pods/4dce317b-36ba-47d9-9175-a50ed2bf038b/volumes" Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.705104 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-9v8tt"] Dec 03 09:15:09 crc kubenswrapper[4576]: I1203 09:15:09.705299 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6578955fd5-9v8tt" podUID="100fee5d-5009-43cd-99c6-6a5dc15185e1" containerName="dnsmasq-dns" containerID="cri-o://8dad5412efe5ec3057df4e8656c7cff2451a8506dc9af8e486c3c802f77ff1fd" gracePeriod=10 Dec 03 09:15:10 crc kubenswrapper[4576]: I1203 09:15:10.062509 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 09:15:10 crc kubenswrapper[4576]: I1203 09:15:10.135679 4576 generic.go:334] "Generic (PLEG): container finished" podID="100fee5d-5009-43cd-99c6-6a5dc15185e1" containerID="8dad5412efe5ec3057df4e8656c7cff2451a8506dc9af8e486c3c802f77ff1fd" exitCode=0 Dec 03 09:15:10 crc kubenswrapper[4576]: I1203 09:15:10.135769 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-9v8tt" event={"ID":"100fee5d-5009-43cd-99c6-6a5dc15185e1","Type":"ContainerDied","Data":"8dad5412efe5ec3057df4e8656c7cff2451a8506dc9af8e486c3c802f77ff1fd"} Dec 03 09:15:10 crc kubenswrapper[4576]: I1203 09:15:10.154459 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"19dc929b-450f-4086-9577-c36504564326","Type":"ContainerStarted","Data":"4c02f8dc9b5865527c820fa1596458a401e1ec292f2a0c2070e7f3056f0f2a25"} Dec 03 09:15:10 crc kubenswrapper[4576]: I1203 09:15:10.417925 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 09:15:10 crc kubenswrapper[4576]: I1203 09:15:10.438121 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-9v8tt" Dec 03 09:15:10 crc kubenswrapper[4576]: I1203 09:15:10.524833 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/100fee5d-5009-43cd-99c6-6a5dc15185e1-ovsdbserver-sb\") pod \"100fee5d-5009-43cd-99c6-6a5dc15185e1\" (UID: \"100fee5d-5009-43cd-99c6-6a5dc15185e1\") " Dec 03 09:15:10 crc kubenswrapper[4576]: I1203 09:15:10.525637 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/100fee5d-5009-43cd-99c6-6a5dc15185e1-config\") pod \"100fee5d-5009-43cd-99c6-6a5dc15185e1\" (UID: \"100fee5d-5009-43cd-99c6-6a5dc15185e1\") " Dec 03 09:15:10 crc kubenswrapper[4576]: I1203 09:15:10.525667 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tq45j\" (UniqueName: \"kubernetes.io/projected/100fee5d-5009-43cd-99c6-6a5dc15185e1-kube-api-access-tq45j\") pod \"100fee5d-5009-43cd-99c6-6a5dc15185e1\" (UID: \"100fee5d-5009-43cd-99c6-6a5dc15185e1\") " Dec 03 09:15:10 crc kubenswrapper[4576]: I1203 09:15:10.525797 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/100fee5d-5009-43cd-99c6-6a5dc15185e1-ovsdbserver-nb\") pod \"100fee5d-5009-43cd-99c6-6a5dc15185e1\" (UID: \"100fee5d-5009-43cd-99c6-6a5dc15185e1\") " Dec 03 09:15:10 crc kubenswrapper[4576]: I1203 09:15:10.525842 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/100fee5d-5009-43cd-99c6-6a5dc15185e1-dns-svc\") pod \"100fee5d-5009-43cd-99c6-6a5dc15185e1\" (UID: \"100fee5d-5009-43cd-99c6-6a5dc15185e1\") " Dec 03 09:15:10 crc kubenswrapper[4576]: I1203 09:15:10.525863 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/100fee5d-5009-43cd-99c6-6a5dc15185e1-dns-swift-storage-0\") pod \"100fee5d-5009-43cd-99c6-6a5dc15185e1\" (UID: \"100fee5d-5009-43cd-99c6-6a5dc15185e1\") " Dec 03 09:15:10 crc kubenswrapper[4576]: I1203 09:15:10.535643 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/100fee5d-5009-43cd-99c6-6a5dc15185e1-kube-api-access-tq45j" (OuterVolumeSpecName: "kube-api-access-tq45j") pod "100fee5d-5009-43cd-99c6-6a5dc15185e1" (UID: "100fee5d-5009-43cd-99c6-6a5dc15185e1"). InnerVolumeSpecName "kube-api-access-tq45j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:15:10 crc kubenswrapper[4576]: I1203 09:15:10.557667 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="0aba50e0-b5ee-461b-837c-137175381621" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.189:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 09:15:10 crc kubenswrapper[4576]: I1203 09:15:10.558039 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="0aba50e0-b5ee-461b-837c-137175381621" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.189:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 09:15:10 crc kubenswrapper[4576]: I1203 09:15:10.597661 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/100fee5d-5009-43cd-99c6-6a5dc15185e1-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "100fee5d-5009-43cd-99c6-6a5dc15185e1" (UID: "100fee5d-5009-43cd-99c6-6a5dc15185e1"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:15:10 crc kubenswrapper[4576]: I1203 09:15:10.637928 4576 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/100fee5d-5009-43cd-99c6-6a5dc15185e1-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:10 crc kubenswrapper[4576]: I1203 09:15:10.638003 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tq45j\" (UniqueName: \"kubernetes.io/projected/100fee5d-5009-43cd-99c6-6a5dc15185e1-kube-api-access-tq45j\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:10 crc kubenswrapper[4576]: I1203 09:15:10.653613 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/100fee5d-5009-43cd-99c6-6a5dc15185e1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "100fee5d-5009-43cd-99c6-6a5dc15185e1" (UID: "100fee5d-5009-43cd-99c6-6a5dc15185e1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:15:10 crc kubenswrapper[4576]: I1203 09:15:10.703367 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/100fee5d-5009-43cd-99c6-6a5dc15185e1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "100fee5d-5009-43cd-99c6-6a5dc15185e1" (UID: "100fee5d-5009-43cd-99c6-6a5dc15185e1"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:15:10 crc kubenswrapper[4576]: I1203 09:15:10.725250 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/100fee5d-5009-43cd-99c6-6a5dc15185e1-config" (OuterVolumeSpecName: "config") pod "100fee5d-5009-43cd-99c6-6a5dc15185e1" (UID: "100fee5d-5009-43cd-99c6-6a5dc15185e1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:15:10 crc kubenswrapper[4576]: I1203 09:15:10.736654 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/100fee5d-5009-43cd-99c6-6a5dc15185e1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "100fee5d-5009-43cd-99c6-6a5dc15185e1" (UID: "100fee5d-5009-43cd-99c6-6a5dc15185e1"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:15:10 crc kubenswrapper[4576]: I1203 09:15:10.740174 4576 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/100fee5d-5009-43cd-99c6-6a5dc15185e1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:10 crc kubenswrapper[4576]: I1203 09:15:10.740201 4576 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/100fee5d-5009-43cd-99c6-6a5dc15185e1-config\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:10 crc kubenswrapper[4576]: I1203 09:15:10.740211 4576 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/100fee5d-5009-43cd-99c6-6a5dc15185e1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:10 crc kubenswrapper[4576]: I1203 09:15:10.740219 4576 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/100fee5d-5009-43cd-99c6-6a5dc15185e1-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:11 crc kubenswrapper[4576]: I1203 09:15:11.102748 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:15:11 crc kubenswrapper[4576]: I1203 09:15:11.103692 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="55b950c6-c4dc-4b77-bf16-eef5815be0d0" containerName="ceilometer-central-agent" containerID="cri-o://e23b9064c9a940f58e5bf095faec33b1f496ae4cf8c714510dd1881be53dd1d5" gracePeriod=30 Dec 03 09:15:11 crc kubenswrapper[4576]: I1203 09:15:11.104184 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="55b950c6-c4dc-4b77-bf16-eef5815be0d0" containerName="proxy-httpd" containerID="cri-o://8201202b2f4bc6aceee324eb432765d52f9356f24031bb84452984810b6338f1" gracePeriod=30 Dec 03 09:15:11 crc kubenswrapper[4576]: I1203 09:15:11.104250 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="55b950c6-c4dc-4b77-bf16-eef5815be0d0" containerName="sg-core" containerID="cri-o://b196439ebcb65c12ceb3d11f2087fa90d4b9fe8fa552c184587c0f7350450a22" gracePeriod=30 Dec 03 09:15:11 crc kubenswrapper[4576]: I1203 09:15:11.104285 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="55b950c6-c4dc-4b77-bf16-eef5815be0d0" containerName="ceilometer-notification-agent" containerID="cri-o://15024ed74a3e4a1f716cc4661feb10dd2385a4bdfafa496eccf3e12fa80787b8" gracePeriod=30 Dec 03 09:15:11 crc kubenswrapper[4576]: I1203 09:15:11.170713 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"19dc929b-450f-4086-9577-c36504564326","Type":"ContainerStarted","Data":"aef29caded1837393ba2781aa1afbecc2ba77451e58fab3eda27bbeab9176fac"} Dec 03 09:15:11 crc kubenswrapper[4576]: I1203 09:15:11.170755 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"19dc929b-450f-4086-9577-c36504564326","Type":"ContainerStarted","Data":"8bd7eeb6038109515e46071114889f9c992e2947e1448984d5ef0eaed6912674"} Dec 03 09:15:11 crc kubenswrapper[4576]: I1203 09:15:11.189029 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" 
event={"ID":"6b92a9ba-d2ef-48e7-8efe-a8cc1c31b6ba","Type":"ContainerStarted","Data":"7a600aa63849f781573b60e0a487030d71753b20a46f1e4fcf75706df8014c53"} Dec 03 09:15:11 crc kubenswrapper[4576]: I1203 09:15:11.200426 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-9v8tt" event={"ID":"100fee5d-5009-43cd-99c6-6a5dc15185e1","Type":"ContainerDied","Data":"57bc70804c6fa94d67b9475c0a19fc6c749c3c087089930792dfbec6321febc2"} Dec 03 09:15:11 crc kubenswrapper[4576]: I1203 09:15:11.200565 4576 scope.go:117] "RemoveContainer" containerID="8dad5412efe5ec3057df4e8656c7cff2451a8506dc9af8e486c3c802f77ff1fd" Dec 03 09:15:11 crc kubenswrapper[4576]: I1203 09:15:11.200817 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-9v8tt" Dec 03 09:15:11 crc kubenswrapper[4576]: I1203 09:15:11.204903 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.204880968 podStartE2EDuration="2.204880968s" podCreationTimestamp="2025-12-03 09:15:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:15:11.188496581 +0000 UTC m=+2118.574473565" watchObservedRunningTime="2025-12-03 09:15:11.204880968 +0000 UTC m=+2118.590857952" Dec 03 09:15:11 crc kubenswrapper[4576]: I1203 09:15:11.265820 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-9v8tt"] Dec 03 09:15:11 crc kubenswrapper[4576]: I1203 09:15:11.267574 4576 scope.go:117] "RemoveContainer" containerID="cb23249c9e1b86979e0198a0eb75cf15f83233a58d110a1e783f04b4f6bea554" Dec 03 09:15:11 crc kubenswrapper[4576]: I1203 09:15:11.297786 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-9v8tt"] Dec 03 09:15:11 crc kubenswrapper[4576]: E1203 09:15:11.496256 4576 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod55b950c6_c4dc_4b77_bf16_eef5815be0d0.slice/crio-8201202b2f4bc6aceee324eb432765d52f9356f24031bb84452984810b6338f1.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod55b950c6_c4dc_4b77_bf16_eef5815be0d0.slice/crio-conmon-8201202b2f4bc6aceee324eb432765d52f9356f24031bb84452984810b6338f1.scope\": RecentStats: unable to find data in memory cache]" Dec 03 09:15:11 crc kubenswrapper[4576]: I1203 09:15:11.687981 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="100fee5d-5009-43cd-99c6-6a5dc15185e1" path="/var/lib/kubelet/pods/100fee5d-5009-43cd-99c6-6a5dc15185e1/volumes" Dec 03 09:15:12 crc kubenswrapper[4576]: I1203 09:15:12.209987 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"6b92a9ba-d2ef-48e7-8efe-a8cc1c31b6ba","Type":"ContainerStarted","Data":"b24d421fd76a62332252170cd560447c4101b417341f851c8d52bddddd964104"} Dec 03 09:15:12 crc kubenswrapper[4576]: I1203 09:15:12.210097 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 03 09:15:12 crc kubenswrapper[4576]: I1203 09:15:12.213653 4576 generic.go:334] "Generic (PLEG): container finished" podID="55b950c6-c4dc-4b77-bf16-eef5815be0d0" containerID="8201202b2f4bc6aceee324eb432765d52f9356f24031bb84452984810b6338f1" exitCode=0 Dec 03 
09:15:12 crc kubenswrapper[4576]: I1203 09:15:12.213679 4576 generic.go:334] "Generic (PLEG): container finished" podID="55b950c6-c4dc-4b77-bf16-eef5815be0d0" containerID="b196439ebcb65c12ceb3d11f2087fa90d4b9fe8fa552c184587c0f7350450a22" exitCode=2 Dec 03 09:15:12 crc kubenswrapper[4576]: I1203 09:15:12.213695 4576 generic.go:334] "Generic (PLEG): container finished" podID="55b950c6-c4dc-4b77-bf16-eef5815be0d0" containerID="e23b9064c9a940f58e5bf095faec33b1f496ae4cf8c714510dd1881be53dd1d5" exitCode=0 Dec 03 09:15:12 crc kubenswrapper[4576]: I1203 09:15:12.213725 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"55b950c6-c4dc-4b77-bf16-eef5815be0d0","Type":"ContainerDied","Data":"8201202b2f4bc6aceee324eb432765d52f9356f24031bb84452984810b6338f1"} Dec 03 09:15:12 crc kubenswrapper[4576]: I1203 09:15:12.213764 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"55b950c6-c4dc-4b77-bf16-eef5815be0d0","Type":"ContainerDied","Data":"b196439ebcb65c12ceb3d11f2087fa90d4b9fe8fa552c184587c0f7350450a22"} Dec 03 09:15:12 crc kubenswrapper[4576]: I1203 09:15:12.213776 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"55b950c6-c4dc-4b77-bf16-eef5815be0d0","Type":"ContainerDied","Data":"e23b9064c9a940f58e5bf095faec33b1f496ae4cf8c714510dd1881be53dd1d5"} Dec 03 09:15:12 crc kubenswrapper[4576]: I1203 09:15:12.238313 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.8516078289999998 podStartE2EDuration="3.238294892s" podCreationTimestamp="2025-12-03 09:15:09 +0000 UTC" firstStartedPulling="2025-12-03 09:15:10.66704921 +0000 UTC m=+2118.053026194" lastFinishedPulling="2025-12-03 09:15:11.053736273 +0000 UTC m=+2118.439713257" observedRunningTime="2025-12-03 09:15:12.230695105 +0000 UTC m=+2119.616672089" watchObservedRunningTime="2025-12-03 09:15:12.238294892 +0000 UTC m=+2119.624271876" Dec 03 09:15:13 crc kubenswrapper[4576]: I1203 09:15:13.230123 4576 generic.go:334] "Generic (PLEG): container finished" podID="66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24" containerID="740357c445c00011968b252024d87485977eeccd7e173d9004c382b862d7c8ed" exitCode=0 Dec 03 09:15:13 crc kubenswrapper[4576]: I1203 09:15:13.230246 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-fm89l" event={"ID":"66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24","Type":"ContainerDied","Data":"740357c445c00011968b252024d87485977eeccd7e173d9004c382b862d7c8ed"} Dec 03 09:15:14 crc kubenswrapper[4576]: I1203 09:15:14.240610 4576 generic.go:334] "Generic (PLEG): container finished" podID="c88ef9a8-712f-430d-859e-3acbf244c2b9" containerID="038ef87ea9073e8d8641b0f5a757e272141bd0fda3ef2096348e3e29fd319af6" exitCode=0 Dec 03 09:15:14 crc kubenswrapper[4576]: I1203 09:15:14.240703 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-bnv8z" event={"ID":"c88ef9a8-712f-430d-859e-3acbf244c2b9","Type":"ContainerDied","Data":"038ef87ea9073e8d8641b0f5a757e272141bd0fda3ef2096348e3e29fd319af6"} Dec 03 09:15:14 crc kubenswrapper[4576]: I1203 09:15:14.351778 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 03 09:15:14 crc kubenswrapper[4576]: I1203 09:15:14.352098 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 03 09:15:14 crc kubenswrapper[4576]: I1203 09:15:14.629320 
4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-fm89l" Dec 03 09:15:14 crc kubenswrapper[4576]: I1203 09:15:14.721352 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24-scripts\") pod \"66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24\" (UID: \"66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24\") " Dec 03 09:15:14 crc kubenswrapper[4576]: I1203 09:15:14.721406 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wlv2h\" (UniqueName: \"kubernetes.io/projected/66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24-kube-api-access-wlv2h\") pod \"66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24\" (UID: \"66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24\") " Dec 03 09:15:14 crc kubenswrapper[4576]: I1203 09:15:14.721469 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24-config-data\") pod \"66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24\" (UID: \"66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24\") " Dec 03 09:15:14 crc kubenswrapper[4576]: I1203 09:15:14.721565 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24-combined-ca-bundle\") pod \"66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24\" (UID: \"66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24\") " Dec 03 09:15:14 crc kubenswrapper[4576]: I1203 09:15:14.727184 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24-kube-api-access-wlv2h" (OuterVolumeSpecName: "kube-api-access-wlv2h") pod "66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24" (UID: "66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24"). InnerVolumeSpecName "kube-api-access-wlv2h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:15:14 crc kubenswrapper[4576]: I1203 09:15:14.728030 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24-scripts" (OuterVolumeSpecName: "scripts") pod "66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24" (UID: "66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:15:14 crc kubenswrapper[4576]: I1203 09:15:14.754671 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24" (UID: "66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:15:14 crc kubenswrapper[4576]: I1203 09:15:14.760045 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24-config-data" (OuterVolumeSpecName: "config-data") pod "66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24" (UID: "66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:15:14 crc kubenswrapper[4576]: I1203 09:15:14.824428 4576 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:14 crc kubenswrapper[4576]: I1203 09:15:14.824469 4576 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:14 crc kubenswrapper[4576]: I1203 09:15:14.824483 4576 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:14 crc kubenswrapper[4576]: I1203 09:15:14.824496 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wlv2h\" (UniqueName: \"kubernetes.io/projected/66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24-kube-api-access-wlv2h\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:15 crc kubenswrapper[4576]: I1203 09:15:15.249733 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-fm89l" event={"ID":"66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24","Type":"ContainerDied","Data":"bdcee4ffad261057e1cc1c96bb93061afe74cd8b9c8090d968ef04a284bc9447"} Dec 03 09:15:15 crc kubenswrapper[4576]: I1203 09:15:15.250008 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bdcee4ffad261057e1cc1c96bb93061afe74cd8b9c8090d968ef04a284bc9447" Dec 03 09:15:15 crc kubenswrapper[4576]: I1203 09:15:15.249763 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-fm89l" Dec 03 09:15:15 crc kubenswrapper[4576]: I1203 09:15:15.446388 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 03 09:15:15 crc kubenswrapper[4576]: I1203 09:15:15.446724 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="0aba50e0-b5ee-461b-837c-137175381621" containerName="nova-api-log" containerID="cri-o://d9d514e0ad889b431d0d22bb4adee4349d16189b109df2c897414f9a4bf26b47" gracePeriod=30 Dec 03 09:15:15 crc kubenswrapper[4576]: I1203 09:15:15.447771 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="0aba50e0-b5ee-461b-837c-137175381621" containerName="nova-api-api" containerID="cri-o://3d85fe1b393ea56a48fdffa15c038b96bdc31cc18390b3c7bfe3332f1ed067b1" gracePeriod=30 Dec 03 09:15:15 crc kubenswrapper[4576]: I1203 09:15:15.457927 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 09:15:15 crc kubenswrapper[4576]: I1203 09:15:15.458101 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="386046da-1085-4aca-bc7d-a9960318c1cb" containerName="nova-scheduler-scheduler" containerID="cri-o://6ddf33c0754e54668f7bc428a6e1908109b8791fb015b0f348daa101cfa41064" gracePeriod=30 Dec 03 09:15:15 crc kubenswrapper[4576]: I1203 09:15:15.486502 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 09:15:15 crc kubenswrapper[4576]: I1203 09:15:15.727520 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-bnv8z" Dec 03 09:15:15 crc kubenswrapper[4576]: I1203 09:15:15.851632 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c88ef9a8-712f-430d-859e-3acbf244c2b9-combined-ca-bundle\") pod \"c88ef9a8-712f-430d-859e-3acbf244c2b9\" (UID: \"c88ef9a8-712f-430d-859e-3acbf244c2b9\") " Dec 03 09:15:15 crc kubenswrapper[4576]: I1203 09:15:15.851703 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c88ef9a8-712f-430d-859e-3acbf244c2b9-scripts\") pod \"c88ef9a8-712f-430d-859e-3acbf244c2b9\" (UID: \"c88ef9a8-712f-430d-859e-3acbf244c2b9\") " Dec 03 09:15:15 crc kubenswrapper[4576]: I1203 09:15:15.851724 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-msxdg\" (UniqueName: \"kubernetes.io/projected/c88ef9a8-712f-430d-859e-3acbf244c2b9-kube-api-access-msxdg\") pod \"c88ef9a8-712f-430d-859e-3acbf244c2b9\" (UID: \"c88ef9a8-712f-430d-859e-3acbf244c2b9\") " Dec 03 09:15:15 crc kubenswrapper[4576]: I1203 09:15:15.851748 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c88ef9a8-712f-430d-859e-3acbf244c2b9-config-data\") pod \"c88ef9a8-712f-430d-859e-3acbf244c2b9\" (UID: \"c88ef9a8-712f-430d-859e-3acbf244c2b9\") " Dec 03 09:15:15 crc kubenswrapper[4576]: I1203 09:15:15.859915 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c88ef9a8-712f-430d-859e-3acbf244c2b9-scripts" (OuterVolumeSpecName: "scripts") pod "c88ef9a8-712f-430d-859e-3acbf244c2b9" (UID: "c88ef9a8-712f-430d-859e-3acbf244c2b9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:15:15 crc kubenswrapper[4576]: I1203 09:15:15.887995 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c88ef9a8-712f-430d-859e-3acbf244c2b9-kube-api-access-msxdg" (OuterVolumeSpecName: "kube-api-access-msxdg") pod "c88ef9a8-712f-430d-859e-3acbf244c2b9" (UID: "c88ef9a8-712f-430d-859e-3acbf244c2b9"). InnerVolumeSpecName "kube-api-access-msxdg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:15:15 crc kubenswrapper[4576]: I1203 09:15:15.934220 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c88ef9a8-712f-430d-859e-3acbf244c2b9-config-data" (OuterVolumeSpecName: "config-data") pod "c88ef9a8-712f-430d-859e-3acbf244c2b9" (UID: "c88ef9a8-712f-430d-859e-3acbf244c2b9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:15:15 crc kubenswrapper[4576]: I1203 09:15:15.951636 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c88ef9a8-712f-430d-859e-3acbf244c2b9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c88ef9a8-712f-430d-859e-3acbf244c2b9" (UID: "c88ef9a8-712f-430d-859e-3acbf244c2b9"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:15:15 crc kubenswrapper[4576]: I1203 09:15:15.953987 4576 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c88ef9a8-712f-430d-859e-3acbf244c2b9-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:15 crc kubenswrapper[4576]: I1203 09:15:15.954012 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-msxdg\" (UniqueName: \"kubernetes.io/projected/c88ef9a8-712f-430d-859e-3acbf244c2b9-kube-api-access-msxdg\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:15 crc kubenswrapper[4576]: I1203 09:15:15.954024 4576 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c88ef9a8-712f-430d-859e-3acbf244c2b9-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:15 crc kubenswrapper[4576]: I1203 09:15:15.954035 4576 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c88ef9a8-712f-430d-859e-3acbf244c2b9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.067156 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.157993 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/55b950c6-c4dc-4b77-bf16-eef5815be0d0-log-httpd\") pod \"55b950c6-c4dc-4b77-bf16-eef5815be0d0\" (UID: \"55b950c6-c4dc-4b77-bf16-eef5815be0d0\") " Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.158079 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55b950c6-c4dc-4b77-bf16-eef5815be0d0-combined-ca-bundle\") pod \"55b950c6-c4dc-4b77-bf16-eef5815be0d0\" (UID: \"55b950c6-c4dc-4b77-bf16-eef5815be0d0\") " Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.158113 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/55b950c6-c4dc-4b77-bf16-eef5815be0d0-scripts\") pod \"55b950c6-c4dc-4b77-bf16-eef5815be0d0\" (UID: \"55b950c6-c4dc-4b77-bf16-eef5815be0d0\") " Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.158140 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9hpbh\" (UniqueName: \"kubernetes.io/projected/55b950c6-c4dc-4b77-bf16-eef5815be0d0-kube-api-access-9hpbh\") pod \"55b950c6-c4dc-4b77-bf16-eef5815be0d0\" (UID: \"55b950c6-c4dc-4b77-bf16-eef5815be0d0\") " Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.158187 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/55b950c6-c4dc-4b77-bf16-eef5815be0d0-sg-core-conf-yaml\") pod \"55b950c6-c4dc-4b77-bf16-eef5815be0d0\" (UID: \"55b950c6-c4dc-4b77-bf16-eef5815be0d0\") " Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.158223 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55b950c6-c4dc-4b77-bf16-eef5815be0d0-config-data\") pod \"55b950c6-c4dc-4b77-bf16-eef5815be0d0\" (UID: \"55b950c6-c4dc-4b77-bf16-eef5815be0d0\") " Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.158297 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/55b950c6-c4dc-4b77-bf16-eef5815be0d0-run-httpd\") pod \"55b950c6-c4dc-4b77-bf16-eef5815be0d0\" (UID: \"55b950c6-c4dc-4b77-bf16-eef5815be0d0\") " Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.165454 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/55b950c6-c4dc-4b77-bf16-eef5815be0d0-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "55b950c6-c4dc-4b77-bf16-eef5815be0d0" (UID: "55b950c6-c4dc-4b77-bf16-eef5815be0d0"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.171602 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/55b950c6-c4dc-4b77-bf16-eef5815be0d0-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "55b950c6-c4dc-4b77-bf16-eef5815be0d0" (UID: "55b950c6-c4dc-4b77-bf16-eef5815be0d0"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.183163 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55b950c6-c4dc-4b77-bf16-eef5815be0d0-kube-api-access-9hpbh" (OuterVolumeSpecName: "kube-api-access-9hpbh") pod "55b950c6-c4dc-4b77-bf16-eef5815be0d0" (UID: "55b950c6-c4dc-4b77-bf16-eef5815be0d0"). InnerVolumeSpecName "kube-api-access-9hpbh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.190761 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55b950c6-c4dc-4b77-bf16-eef5815be0d0-scripts" (OuterVolumeSpecName: "scripts") pod "55b950c6-c4dc-4b77-bf16-eef5815be0d0" (UID: "55b950c6-c4dc-4b77-bf16-eef5815be0d0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.223847 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55b950c6-c4dc-4b77-bf16-eef5815be0d0-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "55b950c6-c4dc-4b77-bf16-eef5815be0d0" (UID: "55b950c6-c4dc-4b77-bf16-eef5815be0d0"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.261274 4576 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/55b950c6-c4dc-4b77-bf16-eef5815be0d0-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.261311 4576 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/55b950c6-c4dc-4b77-bf16-eef5815be0d0-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.261322 4576 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/55b950c6-c4dc-4b77-bf16-eef5815be0d0-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.261334 4576 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/55b950c6-c4dc-4b77-bf16-eef5815be0d0-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.261347 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9hpbh\" (UniqueName: \"kubernetes.io/projected/55b950c6-c4dc-4b77-bf16-eef5815be0d0-kube-api-access-9hpbh\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.266038 4576 generic.go:334] "Generic (PLEG): container finished" podID="0aba50e0-b5ee-461b-837c-137175381621" containerID="d9d514e0ad889b431d0d22bb4adee4349d16189b109df2c897414f9a4bf26b47" exitCode=143 Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.266209 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0aba50e0-b5ee-461b-837c-137175381621","Type":"ContainerDied","Data":"d9d514e0ad889b431d0d22bb4adee4349d16189b109df2c897414f9a4bf26b47"} Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.268676 4576 generic.go:334] "Generic (PLEG): container finished" podID="55b950c6-c4dc-4b77-bf16-eef5815be0d0" containerID="15024ed74a3e4a1f716cc4661feb10dd2385a4bdfafa496eccf3e12fa80787b8" exitCode=0 Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.268749 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.268762 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"55b950c6-c4dc-4b77-bf16-eef5815be0d0","Type":"ContainerDied","Data":"15024ed74a3e4a1f716cc4661feb10dd2385a4bdfafa496eccf3e12fa80787b8"} Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.270154 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"55b950c6-c4dc-4b77-bf16-eef5815be0d0","Type":"ContainerDied","Data":"ec6e7be302780c943b57dbafb6577feb1279315130e7311dded2dd89194f8e30"} Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.270287 4576 scope.go:117] "RemoveContainer" containerID="8201202b2f4bc6aceee324eb432765d52f9356f24031bb84452984810b6338f1" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.275377 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="19dc929b-450f-4086-9577-c36504564326" containerName="nova-metadata-log" containerID="cri-o://8bd7eeb6038109515e46071114889f9c992e2947e1448984d5ef0eaed6912674" gracePeriod=30 Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.276154 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-bnv8z" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.277635 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-bnv8z" event={"ID":"c88ef9a8-712f-430d-859e-3acbf244c2b9","Type":"ContainerDied","Data":"047d08886a4b1caa4915d10782b4ecfdf57102cc209b64e822f7784623081f08"} Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.277666 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="047d08886a4b1caa4915d10782b4ecfdf57102cc209b64e822f7784623081f08" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.278101 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="19dc929b-450f-4086-9577-c36504564326" containerName="nova-metadata-metadata" containerID="cri-o://aef29caded1837393ba2781aa1afbecc2ba77451e58fab3eda27bbeab9176fac" gracePeriod=30 Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.282065 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55b950c6-c4dc-4b77-bf16-eef5815be0d0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "55b950c6-c4dc-4b77-bf16-eef5815be0d0" (UID: "55b950c6-c4dc-4b77-bf16-eef5815be0d0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.350780 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55b950c6-c4dc-4b77-bf16-eef5815be0d0-config-data" (OuterVolumeSpecName: "config-data") pod "55b950c6-c4dc-4b77-bf16-eef5815be0d0" (UID: "55b950c6-c4dc-4b77-bf16-eef5815be0d0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.353475 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 03 09:15:16 crc kubenswrapper[4576]: E1203 09:15:16.353945 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55b950c6-c4dc-4b77-bf16-eef5815be0d0" containerName="ceilometer-notification-agent" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.353964 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="55b950c6-c4dc-4b77-bf16-eef5815be0d0" containerName="ceilometer-notification-agent" Dec 03 09:15:16 crc kubenswrapper[4576]: E1203 09:15:16.353987 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55b950c6-c4dc-4b77-bf16-eef5815be0d0" containerName="ceilometer-central-agent" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.353994 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="55b950c6-c4dc-4b77-bf16-eef5815be0d0" containerName="ceilometer-central-agent" Dec 03 09:15:16 crc kubenswrapper[4576]: E1203 09:15:16.354006 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c88ef9a8-712f-430d-859e-3acbf244c2b9" containerName="nova-cell1-conductor-db-sync" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.354013 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="c88ef9a8-712f-430d-859e-3acbf244c2b9" containerName="nova-cell1-conductor-db-sync" Dec 03 09:15:16 crc kubenswrapper[4576]: E1203 09:15:16.354028 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="100fee5d-5009-43cd-99c6-6a5dc15185e1" containerName="init" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.354035 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="100fee5d-5009-43cd-99c6-6a5dc15185e1" containerName="init" Dec 03 09:15:16 crc kubenswrapper[4576]: E1203 09:15:16.354066 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55b950c6-c4dc-4b77-bf16-eef5815be0d0" containerName="sg-core" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.354076 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="55b950c6-c4dc-4b77-bf16-eef5815be0d0" containerName="sg-core" Dec 03 09:15:16 crc kubenswrapper[4576]: E1203 09:15:16.354090 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="100fee5d-5009-43cd-99c6-6a5dc15185e1" containerName="dnsmasq-dns" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.354097 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="100fee5d-5009-43cd-99c6-6a5dc15185e1" containerName="dnsmasq-dns" Dec 03 09:15:16 crc kubenswrapper[4576]: E1203 09:15:16.354112 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55b950c6-c4dc-4b77-bf16-eef5815be0d0" containerName="proxy-httpd" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.354120 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="55b950c6-c4dc-4b77-bf16-eef5815be0d0" containerName="proxy-httpd" Dec 03 09:15:16 crc kubenswrapper[4576]: E1203 09:15:16.354136 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24" containerName="nova-manage" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.354141 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24" containerName="nova-manage" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.357146 4576 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="55b950c6-c4dc-4b77-bf16-eef5815be0d0" containerName="ceilometer-notification-agent" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.357187 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="c88ef9a8-712f-430d-859e-3acbf244c2b9" containerName="nova-cell1-conductor-db-sync" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.357201 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="55b950c6-c4dc-4b77-bf16-eef5815be0d0" containerName="proxy-httpd" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.357212 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="55b950c6-c4dc-4b77-bf16-eef5815be0d0" containerName="sg-core" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.357229 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="100fee5d-5009-43cd-99c6-6a5dc15185e1" containerName="dnsmasq-dns" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.357240 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="55b950c6-c4dc-4b77-bf16-eef5815be0d0" containerName="ceilometer-central-agent" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.357252 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24" containerName="nova-manage" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.358088 4576 scope.go:117] "RemoveContainer" containerID="b196439ebcb65c12ceb3d11f2087fa90d4b9fe8fa552c184587c0f7350450a22" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.360815 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.362911 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.363421 4576 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55b950c6-c4dc-4b77-bf16-eef5815be0d0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.363471 4576 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55b950c6-c4dc-4b77-bf16-eef5815be0d0-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.379696 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.420741 4576 scope.go:117] "RemoveContainer" containerID="15024ed74a3e4a1f716cc4661feb10dd2385a4bdfafa496eccf3e12fa80787b8" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.451157 4576 scope.go:117] "RemoveContainer" containerID="e23b9064c9a940f58e5bf095faec33b1f496ae4cf8c714510dd1881be53dd1d5" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.465220 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2be87e85-f004-483d-9faf-4f1dd3a050bd-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"2be87e85-f004-483d-9faf-4f1dd3a050bd\") " pod="openstack/nova-cell1-conductor-0" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.465678 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9nzsm\" (UniqueName: 
\"kubernetes.io/projected/2be87e85-f004-483d-9faf-4f1dd3a050bd-kube-api-access-9nzsm\") pod \"nova-cell1-conductor-0\" (UID: \"2be87e85-f004-483d-9faf-4f1dd3a050bd\") " pod="openstack/nova-cell1-conductor-0" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.465733 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2be87e85-f004-483d-9faf-4f1dd3a050bd-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"2be87e85-f004-483d-9faf-4f1dd3a050bd\") " pod="openstack/nova-cell1-conductor-0" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.505183 4576 scope.go:117] "RemoveContainer" containerID="8201202b2f4bc6aceee324eb432765d52f9356f24031bb84452984810b6338f1" Dec 03 09:15:16 crc kubenswrapper[4576]: E1203 09:15:16.505912 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8201202b2f4bc6aceee324eb432765d52f9356f24031bb84452984810b6338f1\": container with ID starting with 8201202b2f4bc6aceee324eb432765d52f9356f24031bb84452984810b6338f1 not found: ID does not exist" containerID="8201202b2f4bc6aceee324eb432765d52f9356f24031bb84452984810b6338f1" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.505944 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8201202b2f4bc6aceee324eb432765d52f9356f24031bb84452984810b6338f1"} err="failed to get container status \"8201202b2f4bc6aceee324eb432765d52f9356f24031bb84452984810b6338f1\": rpc error: code = NotFound desc = could not find container \"8201202b2f4bc6aceee324eb432765d52f9356f24031bb84452984810b6338f1\": container with ID starting with 8201202b2f4bc6aceee324eb432765d52f9356f24031bb84452984810b6338f1 not found: ID does not exist" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.505982 4576 scope.go:117] "RemoveContainer" containerID="b196439ebcb65c12ceb3d11f2087fa90d4b9fe8fa552c184587c0f7350450a22" Dec 03 09:15:16 crc kubenswrapper[4576]: E1203 09:15:16.506312 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b196439ebcb65c12ceb3d11f2087fa90d4b9fe8fa552c184587c0f7350450a22\": container with ID starting with b196439ebcb65c12ceb3d11f2087fa90d4b9fe8fa552c184587c0f7350450a22 not found: ID does not exist" containerID="b196439ebcb65c12ceb3d11f2087fa90d4b9fe8fa552c184587c0f7350450a22" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.506405 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b196439ebcb65c12ceb3d11f2087fa90d4b9fe8fa552c184587c0f7350450a22"} err="failed to get container status \"b196439ebcb65c12ceb3d11f2087fa90d4b9fe8fa552c184587c0f7350450a22\": rpc error: code = NotFound desc = could not find container \"b196439ebcb65c12ceb3d11f2087fa90d4b9fe8fa552c184587c0f7350450a22\": container with ID starting with b196439ebcb65c12ceb3d11f2087fa90d4b9fe8fa552c184587c0f7350450a22 not found: ID does not exist" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.506428 4576 scope.go:117] "RemoveContainer" containerID="15024ed74a3e4a1f716cc4661feb10dd2385a4bdfafa496eccf3e12fa80787b8" Dec 03 09:15:16 crc kubenswrapper[4576]: E1203 09:15:16.506863 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"15024ed74a3e4a1f716cc4661feb10dd2385a4bdfafa496eccf3e12fa80787b8\": container with ID starting with 
15024ed74a3e4a1f716cc4661feb10dd2385a4bdfafa496eccf3e12fa80787b8 not found: ID does not exist" containerID="15024ed74a3e4a1f716cc4661feb10dd2385a4bdfafa496eccf3e12fa80787b8" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.506903 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15024ed74a3e4a1f716cc4661feb10dd2385a4bdfafa496eccf3e12fa80787b8"} err="failed to get container status \"15024ed74a3e4a1f716cc4661feb10dd2385a4bdfafa496eccf3e12fa80787b8\": rpc error: code = NotFound desc = could not find container \"15024ed74a3e4a1f716cc4661feb10dd2385a4bdfafa496eccf3e12fa80787b8\": container with ID starting with 15024ed74a3e4a1f716cc4661feb10dd2385a4bdfafa496eccf3e12fa80787b8 not found: ID does not exist" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.506933 4576 scope.go:117] "RemoveContainer" containerID="e23b9064c9a940f58e5bf095faec33b1f496ae4cf8c714510dd1881be53dd1d5" Dec 03 09:15:16 crc kubenswrapper[4576]: E1203 09:15:16.507257 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e23b9064c9a940f58e5bf095faec33b1f496ae4cf8c714510dd1881be53dd1d5\": container with ID starting with e23b9064c9a940f58e5bf095faec33b1f496ae4cf8c714510dd1881be53dd1d5 not found: ID does not exist" containerID="e23b9064c9a940f58e5bf095faec33b1f496ae4cf8c714510dd1881be53dd1d5" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.507285 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e23b9064c9a940f58e5bf095faec33b1f496ae4cf8c714510dd1881be53dd1d5"} err="failed to get container status \"e23b9064c9a940f58e5bf095faec33b1f496ae4cf8c714510dd1881be53dd1d5\": rpc error: code = NotFound desc = could not find container \"e23b9064c9a940f58e5bf095faec33b1f496ae4cf8c714510dd1881be53dd1d5\": container with ID starting with e23b9064c9a940f58e5bf095faec33b1f496ae4cf8c714510dd1881be53dd1d5 not found: ID does not exist" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.569989 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9nzsm\" (UniqueName: \"kubernetes.io/projected/2be87e85-f004-483d-9faf-4f1dd3a050bd-kube-api-access-9nzsm\") pod \"nova-cell1-conductor-0\" (UID: \"2be87e85-f004-483d-9faf-4f1dd3a050bd\") " pod="openstack/nova-cell1-conductor-0" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.570054 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2be87e85-f004-483d-9faf-4f1dd3a050bd-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"2be87e85-f004-483d-9faf-4f1dd3a050bd\") " pod="openstack/nova-cell1-conductor-0" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.570094 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2be87e85-f004-483d-9faf-4f1dd3a050bd-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"2be87e85-f004-483d-9faf-4f1dd3a050bd\") " pod="openstack/nova-cell1-conductor-0" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.589436 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2be87e85-f004-483d-9faf-4f1dd3a050bd-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"2be87e85-f004-483d-9faf-4f1dd3a050bd\") " pod="openstack/nova-cell1-conductor-0" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 
09:15:16.591186 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2be87e85-f004-483d-9faf-4f1dd3a050bd-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"2be87e85-f004-483d-9faf-4f1dd3a050bd\") " pod="openstack/nova-cell1-conductor-0" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.598257 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9nzsm\" (UniqueName: \"kubernetes.io/projected/2be87e85-f004-483d-9faf-4f1dd3a050bd-kube-api-access-9nzsm\") pod \"nova-cell1-conductor-0\" (UID: \"2be87e85-f004-483d-9faf-4f1dd3a050bd\") " pod="openstack/nova-cell1-conductor-0" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.695088 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.700846 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.719788 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.748259 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.750507 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.760448 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.762117 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.762480 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.768140 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.876751 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/32ff8575-080b-4631-865b-2e9cb4d2c776-log-httpd\") pod \"ceilometer-0\" (UID: \"32ff8575-080b-4631-865b-2e9cb4d2c776\") " pod="openstack/ceilometer-0" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.876877 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/32ff8575-080b-4631-865b-2e9cb4d2c776-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"32ff8575-080b-4631-865b-2e9cb4d2c776\") " pod="openstack/ceilometer-0" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.876910 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/32ff8575-080b-4631-865b-2e9cb4d2c776-run-httpd\") pod \"ceilometer-0\" (UID: \"32ff8575-080b-4631-865b-2e9cb4d2c776\") " pod="openstack/ceilometer-0" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.876982 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32ff8575-080b-4631-865b-2e9cb4d2c776-combined-ca-bundle\") pod 
\"ceilometer-0\" (UID: \"32ff8575-080b-4631-865b-2e9cb4d2c776\") " pod="openstack/ceilometer-0" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.877010 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32ff8575-080b-4631-865b-2e9cb4d2c776-config-data\") pod \"ceilometer-0\" (UID: \"32ff8575-080b-4631-865b-2e9cb4d2c776\") " pod="openstack/ceilometer-0" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.877057 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/32ff8575-080b-4631-865b-2e9cb4d2c776-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"32ff8575-080b-4631-865b-2e9cb4d2c776\") " pod="openstack/ceilometer-0" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.877088 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32ff8575-080b-4631-865b-2e9cb4d2c776-scripts\") pod \"ceilometer-0\" (UID: \"32ff8575-080b-4631-865b-2e9cb4d2c776\") " pod="openstack/ceilometer-0" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.877122 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brjph\" (UniqueName: \"kubernetes.io/projected/32ff8575-080b-4631-865b-2e9cb4d2c776-kube-api-access-brjph\") pod \"ceilometer-0\" (UID: \"32ff8575-080b-4631-865b-2e9cb4d2c776\") " pod="openstack/ceilometer-0" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.951108 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.983327 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32ff8575-080b-4631-865b-2e9cb4d2c776-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"32ff8575-080b-4631-865b-2e9cb4d2c776\") " pod="openstack/ceilometer-0" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.983380 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32ff8575-080b-4631-865b-2e9cb4d2c776-config-data\") pod \"ceilometer-0\" (UID: \"32ff8575-080b-4631-865b-2e9cb4d2c776\") " pod="openstack/ceilometer-0" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.983435 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/32ff8575-080b-4631-865b-2e9cb4d2c776-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"32ff8575-080b-4631-865b-2e9cb4d2c776\") " pod="openstack/ceilometer-0" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.983473 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32ff8575-080b-4631-865b-2e9cb4d2c776-scripts\") pod \"ceilometer-0\" (UID: \"32ff8575-080b-4631-865b-2e9cb4d2c776\") " pod="openstack/ceilometer-0" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.983588 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brjph\" (UniqueName: \"kubernetes.io/projected/32ff8575-080b-4631-865b-2e9cb4d2c776-kube-api-access-brjph\") pod \"ceilometer-0\" (UID: \"32ff8575-080b-4631-865b-2e9cb4d2c776\") " pod="openstack/ceilometer-0" Dec 
03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.983686 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/32ff8575-080b-4631-865b-2e9cb4d2c776-log-httpd\") pod \"ceilometer-0\" (UID: \"32ff8575-080b-4631-865b-2e9cb4d2c776\") " pod="openstack/ceilometer-0" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.983783 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/32ff8575-080b-4631-865b-2e9cb4d2c776-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"32ff8575-080b-4631-865b-2e9cb4d2c776\") " pod="openstack/ceilometer-0" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.983807 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/32ff8575-080b-4631-865b-2e9cb4d2c776-run-httpd\") pod \"ceilometer-0\" (UID: \"32ff8575-080b-4631-865b-2e9cb4d2c776\") " pod="openstack/ceilometer-0" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.984286 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/32ff8575-080b-4631-865b-2e9cb4d2c776-run-httpd\") pod \"ceilometer-0\" (UID: \"32ff8575-080b-4631-865b-2e9cb4d2c776\") " pod="openstack/ceilometer-0" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.985164 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/32ff8575-080b-4631-865b-2e9cb4d2c776-log-httpd\") pod \"ceilometer-0\" (UID: \"32ff8575-080b-4631-865b-2e9cb4d2c776\") " pod="openstack/ceilometer-0" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.989681 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32ff8575-080b-4631-865b-2e9cb4d2c776-scripts\") pod \"ceilometer-0\" (UID: \"32ff8575-080b-4631-865b-2e9cb4d2c776\") " pod="openstack/ceilometer-0" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.991436 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32ff8575-080b-4631-865b-2e9cb4d2c776-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"32ff8575-080b-4631-865b-2e9cb4d2c776\") " pod="openstack/ceilometer-0" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.991681 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32ff8575-080b-4631-865b-2e9cb4d2c776-config-data\") pod \"ceilometer-0\" (UID: \"32ff8575-080b-4631-865b-2e9cb4d2c776\") " pod="openstack/ceilometer-0" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.991883 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/32ff8575-080b-4631-865b-2e9cb4d2c776-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"32ff8575-080b-4631-865b-2e9cb4d2c776\") " pod="openstack/ceilometer-0" Dec 03 09:15:16 crc kubenswrapper[4576]: I1203 09:15:16.993110 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/32ff8575-080b-4631-865b-2e9cb4d2c776-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"32ff8575-080b-4631-865b-2e9cb4d2c776\") " pod="openstack/ceilometer-0" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.015200 4576 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brjph\" (UniqueName: \"kubernetes.io/projected/32ff8575-080b-4631-865b-2e9cb4d2c776-kube-api-access-brjph\") pod \"ceilometer-0\" (UID: \"32ff8575-080b-4631-865b-2e9cb4d2c776\") " pod="openstack/ceilometer-0" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.084604 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-76f2q\" (UniqueName: \"kubernetes.io/projected/19dc929b-450f-4086-9577-c36504564326-kube-api-access-76f2q\") pod \"19dc929b-450f-4086-9577-c36504564326\" (UID: \"19dc929b-450f-4086-9577-c36504564326\") " Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.084658 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/19dc929b-450f-4086-9577-c36504564326-nova-metadata-tls-certs\") pod \"19dc929b-450f-4086-9577-c36504564326\" (UID: \"19dc929b-450f-4086-9577-c36504564326\") " Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.084699 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/19dc929b-450f-4086-9577-c36504564326-logs\") pod \"19dc929b-450f-4086-9577-c36504564326\" (UID: \"19dc929b-450f-4086-9577-c36504564326\") " Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.084717 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19dc929b-450f-4086-9577-c36504564326-config-data\") pod \"19dc929b-450f-4086-9577-c36504564326\" (UID: \"19dc929b-450f-4086-9577-c36504564326\") " Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.084865 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19dc929b-450f-4086-9577-c36504564326-combined-ca-bundle\") pod \"19dc929b-450f-4086-9577-c36504564326\" (UID: \"19dc929b-450f-4086-9577-c36504564326\") " Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.088380 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/19dc929b-450f-4086-9577-c36504564326-logs" (OuterVolumeSpecName: "logs") pod "19dc929b-450f-4086-9577-c36504564326" (UID: "19dc929b-450f-4086-9577-c36504564326"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.092259 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19dc929b-450f-4086-9577-c36504564326-kube-api-access-76f2q" (OuterVolumeSpecName: "kube-api-access-76f2q") pod "19dc929b-450f-4086-9577-c36504564326" (UID: "19dc929b-450f-4086-9577-c36504564326"). InnerVolumeSpecName "kube-api-access-76f2q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.119325 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19dc929b-450f-4086-9577-c36504564326-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "19dc929b-450f-4086-9577-c36504564326" (UID: "19dc929b-450f-4086-9577-c36504564326"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.120634 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19dc929b-450f-4086-9577-c36504564326-config-data" (OuterVolumeSpecName: "config-data") pod "19dc929b-450f-4086-9577-c36504564326" (UID: "19dc929b-450f-4086-9577-c36504564326"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.135906 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19dc929b-450f-4086-9577-c36504564326-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "19dc929b-450f-4086-9577-c36504564326" (UID: "19dc929b-450f-4086-9577-c36504564326"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.161580 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.188477 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-76f2q\" (UniqueName: \"kubernetes.io/projected/19dc929b-450f-4086-9577-c36504564326-kube-api-access-76f2q\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.188510 4576 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/19dc929b-450f-4086-9577-c36504564326-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.188519 4576 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/19dc929b-450f-4086-9577-c36504564326-logs\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.188600 4576 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19dc929b-450f-4086-9577-c36504564326-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.188610 4576 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19dc929b-450f-4086-9577-c36504564326-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.269811 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.298145 4576 generic.go:334] "Generic (PLEG): container finished" podID="19dc929b-450f-4086-9577-c36504564326" containerID="aef29caded1837393ba2781aa1afbecc2ba77451e58fab3eda27bbeab9176fac" exitCode=0 Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.298180 4576 generic.go:334] "Generic (PLEG): container finished" podID="19dc929b-450f-4086-9577-c36504564326" containerID="8bd7eeb6038109515e46071114889f9c992e2947e1448984d5ef0eaed6912674" exitCode=143 Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.298198 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"19dc929b-450f-4086-9577-c36504564326","Type":"ContainerDied","Data":"aef29caded1837393ba2781aa1afbecc2ba77451e58fab3eda27bbeab9176fac"} Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.298219 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-metadata-0" event={"ID":"19dc929b-450f-4086-9577-c36504564326","Type":"ContainerDied","Data":"8bd7eeb6038109515e46071114889f9c992e2947e1448984d5ef0eaed6912674"} Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.298229 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"19dc929b-450f-4086-9577-c36504564326","Type":"ContainerDied","Data":"4c02f8dc9b5865527c820fa1596458a401e1ec292f2a0c2070e7f3056f0f2a25"} Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.298244 4576 scope.go:117] "RemoveContainer" containerID="aef29caded1837393ba2781aa1afbecc2ba77451e58fab3eda27bbeab9176fac" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.298332 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.402915 4576 scope.go:117] "RemoveContainer" containerID="8bd7eeb6038109515e46071114889f9c992e2947e1448984d5ef0eaed6912674" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.412267 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.440574 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.441602 4576 scope.go:117] "RemoveContainer" containerID="aef29caded1837393ba2781aa1afbecc2ba77451e58fab3eda27bbeab9176fac" Dec 03 09:15:17 crc kubenswrapper[4576]: E1203 09:15:17.442123 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aef29caded1837393ba2781aa1afbecc2ba77451e58fab3eda27bbeab9176fac\": container with ID starting with aef29caded1837393ba2781aa1afbecc2ba77451e58fab3eda27bbeab9176fac not found: ID does not exist" containerID="aef29caded1837393ba2781aa1afbecc2ba77451e58fab3eda27bbeab9176fac" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.442155 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aef29caded1837393ba2781aa1afbecc2ba77451e58fab3eda27bbeab9176fac"} err="failed to get container status \"aef29caded1837393ba2781aa1afbecc2ba77451e58fab3eda27bbeab9176fac\": rpc error: code = NotFound desc = could not find container \"aef29caded1837393ba2781aa1afbecc2ba77451e58fab3eda27bbeab9176fac\": container with ID starting with aef29caded1837393ba2781aa1afbecc2ba77451e58fab3eda27bbeab9176fac not found: ID does not exist" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.442182 4576 scope.go:117] "RemoveContainer" containerID="8bd7eeb6038109515e46071114889f9c992e2947e1448984d5ef0eaed6912674" Dec 03 09:15:17 crc kubenswrapper[4576]: E1203 09:15:17.442762 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8bd7eeb6038109515e46071114889f9c992e2947e1448984d5ef0eaed6912674\": container with ID starting with 8bd7eeb6038109515e46071114889f9c992e2947e1448984d5ef0eaed6912674 not found: ID does not exist" containerID="8bd7eeb6038109515e46071114889f9c992e2947e1448984d5ef0eaed6912674" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.442820 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8bd7eeb6038109515e46071114889f9c992e2947e1448984d5ef0eaed6912674"} err="failed to get container status \"8bd7eeb6038109515e46071114889f9c992e2947e1448984d5ef0eaed6912674\": rpc error: code = 
NotFound desc = could not find container \"8bd7eeb6038109515e46071114889f9c992e2947e1448984d5ef0eaed6912674\": container with ID starting with 8bd7eeb6038109515e46071114889f9c992e2947e1448984d5ef0eaed6912674 not found: ID does not exist" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.442854 4576 scope.go:117] "RemoveContainer" containerID="aef29caded1837393ba2781aa1afbecc2ba77451e58fab3eda27bbeab9176fac" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.443293 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aef29caded1837393ba2781aa1afbecc2ba77451e58fab3eda27bbeab9176fac"} err="failed to get container status \"aef29caded1837393ba2781aa1afbecc2ba77451e58fab3eda27bbeab9176fac\": rpc error: code = NotFound desc = could not find container \"aef29caded1837393ba2781aa1afbecc2ba77451e58fab3eda27bbeab9176fac\": container with ID starting with aef29caded1837393ba2781aa1afbecc2ba77451e58fab3eda27bbeab9176fac not found: ID does not exist" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.443313 4576 scope.go:117] "RemoveContainer" containerID="8bd7eeb6038109515e46071114889f9c992e2947e1448984d5ef0eaed6912674" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.443727 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8bd7eeb6038109515e46071114889f9c992e2947e1448984d5ef0eaed6912674"} err="failed to get container status \"8bd7eeb6038109515e46071114889f9c992e2947e1448984d5ef0eaed6912674\": rpc error: code = NotFound desc = could not find container \"8bd7eeb6038109515e46071114889f9c992e2947e1448984d5ef0eaed6912674\": container with ID starting with 8bd7eeb6038109515e46071114889f9c992e2947e1448984d5ef0eaed6912674 not found: ID does not exist" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.451759 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 03 09:15:17 crc kubenswrapper[4576]: E1203 09:15:17.452147 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19dc929b-450f-4086-9577-c36504564326" containerName="nova-metadata-metadata" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.452165 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="19dc929b-450f-4086-9577-c36504564326" containerName="nova-metadata-metadata" Dec 03 09:15:17 crc kubenswrapper[4576]: E1203 09:15:17.452182 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19dc929b-450f-4086-9577-c36504564326" containerName="nova-metadata-log" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.452189 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="19dc929b-450f-4086-9577-c36504564326" containerName="nova-metadata-log" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.452746 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="19dc929b-450f-4086-9577-c36504564326" containerName="nova-metadata-metadata" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.452780 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="19dc929b-450f-4086-9577-c36504564326" containerName="nova-metadata-log" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.453697 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.456184 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.456388 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.474327 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.511190 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10c796b7-b92d-4512-828a-59e082e97b16-config-data\") pod \"nova-metadata-0\" (UID: \"10c796b7-b92d-4512-828a-59e082e97b16\") " pod="openstack/nova-metadata-0" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.511250 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10c796b7-b92d-4512-828a-59e082e97b16-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"10c796b7-b92d-4512-828a-59e082e97b16\") " pod="openstack/nova-metadata-0" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.511297 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/10c796b7-b92d-4512-828a-59e082e97b16-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"10c796b7-b92d-4512-828a-59e082e97b16\") " pod="openstack/nova-metadata-0" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.511321 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/10c796b7-b92d-4512-828a-59e082e97b16-logs\") pod \"nova-metadata-0\" (UID: \"10c796b7-b92d-4512-828a-59e082e97b16\") " pod="openstack/nova-metadata-0" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.511371 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4ptl4\" (UniqueName: \"kubernetes.io/projected/10c796b7-b92d-4512-828a-59e082e97b16-kube-api-access-4ptl4\") pod \"nova-metadata-0\" (UID: \"10c796b7-b92d-4512-828a-59e082e97b16\") " pod="openstack/nova-metadata-0" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.613015 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/10c796b7-b92d-4512-828a-59e082e97b16-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"10c796b7-b92d-4512-828a-59e082e97b16\") " pod="openstack/nova-metadata-0" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.613089 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/10c796b7-b92d-4512-828a-59e082e97b16-logs\") pod \"nova-metadata-0\" (UID: \"10c796b7-b92d-4512-828a-59e082e97b16\") " pod="openstack/nova-metadata-0" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.613152 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4ptl4\" (UniqueName: \"kubernetes.io/projected/10c796b7-b92d-4512-828a-59e082e97b16-kube-api-access-4ptl4\") pod \"nova-metadata-0\" (UID: \"10c796b7-b92d-4512-828a-59e082e97b16\") " 
pod="openstack/nova-metadata-0" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.613668 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/10c796b7-b92d-4512-828a-59e082e97b16-logs\") pod \"nova-metadata-0\" (UID: \"10c796b7-b92d-4512-828a-59e082e97b16\") " pod="openstack/nova-metadata-0" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.613922 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10c796b7-b92d-4512-828a-59e082e97b16-config-data\") pod \"nova-metadata-0\" (UID: \"10c796b7-b92d-4512-828a-59e082e97b16\") " pod="openstack/nova-metadata-0" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.613986 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10c796b7-b92d-4512-828a-59e082e97b16-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"10c796b7-b92d-4512-828a-59e082e97b16\") " pod="openstack/nova-metadata-0" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.620229 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/10c796b7-b92d-4512-828a-59e082e97b16-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"10c796b7-b92d-4512-828a-59e082e97b16\") " pod="openstack/nova-metadata-0" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.621071 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10c796b7-b92d-4512-828a-59e082e97b16-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"10c796b7-b92d-4512-828a-59e082e97b16\") " pod="openstack/nova-metadata-0" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.622245 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10c796b7-b92d-4512-828a-59e082e97b16-config-data\") pod \"nova-metadata-0\" (UID: \"10c796b7-b92d-4512-828a-59e082e97b16\") " pod="openstack/nova-metadata-0" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.630399 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4ptl4\" (UniqueName: \"kubernetes.io/projected/10c796b7-b92d-4512-828a-59e082e97b16-kube-api-access-4ptl4\") pod \"nova-metadata-0\" (UID: \"10c796b7-b92d-4512-828a-59e082e97b16\") " pod="openstack/nova-metadata-0" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.703983 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="19dc929b-450f-4086-9577-c36504564326" path="/var/lib/kubelet/pods/19dc929b-450f-4086-9577-c36504564326/volumes" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.707986 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="55b950c6-c4dc-4b77-bf16-eef5815be0d0" path="/var/lib/kubelet/pods/55b950c6-c4dc-4b77-bf16-eef5815be0d0/volumes" Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.726886 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:15:17 crc kubenswrapper[4576]: W1203 09:15:17.737131 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod32ff8575_080b_4631_865b_2e9cb4d2c776.slice/crio-62b27011aedd5345fcfe707638045784fbba4bdb8d80ce228732918373197c34 WatchSource:0}: Error finding container 
62b27011aedd5345fcfe707638045784fbba4bdb8d80ce228732918373197c34: Status 404 returned error can't find the container with id 62b27011aedd5345fcfe707638045784fbba4bdb8d80ce228732918373197c34 Dec 03 09:15:17 crc kubenswrapper[4576]: I1203 09:15:17.784727 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 09:15:18 crc kubenswrapper[4576]: I1203 09:15:18.096297 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 09:15:18 crc kubenswrapper[4576]: W1203 09:15:18.113395 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod10c796b7_b92d_4512_828a_59e082e97b16.slice/crio-1397c7b38bce2c35a178c42295b9a818cd643fd8ee0e822a05368a582fc272ca WatchSource:0}: Error finding container 1397c7b38bce2c35a178c42295b9a818cd643fd8ee0e822a05368a582fc272ca: Status 404 returned error can't find the container with id 1397c7b38bce2c35a178c42295b9a818cd643fd8ee0e822a05368a582fc272ca Dec 03 09:15:18 crc kubenswrapper[4576]: I1203 09:15:18.311403 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"10c796b7-b92d-4512-828a-59e082e97b16","Type":"ContainerStarted","Data":"1bf4b2e6b08659aa36e2bf65a4d956c7def3d4eb1564917bce5524ac07e2d8b8"} Dec 03 09:15:18 crc kubenswrapper[4576]: I1203 09:15:18.311447 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"10c796b7-b92d-4512-828a-59e082e97b16","Type":"ContainerStarted","Data":"1397c7b38bce2c35a178c42295b9a818cd643fd8ee0e822a05368a582fc272ca"} Dec 03 09:15:18 crc kubenswrapper[4576]: I1203 09:15:18.314296 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"2be87e85-f004-483d-9faf-4f1dd3a050bd","Type":"ContainerStarted","Data":"d35bff41021cdadb11712eb290afd44707fe83b18cfcd3156a91009b6a82356c"} Dec 03 09:15:18 crc kubenswrapper[4576]: I1203 09:15:18.314539 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"2be87e85-f004-483d-9faf-4f1dd3a050bd","Type":"ContainerStarted","Data":"7019b68838dee264621c294906823b04ebb63cc6bc805e40d970c90c4de0edea"} Dec 03 09:15:18 crc kubenswrapper[4576]: I1203 09:15:18.314705 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Dec 03 09:15:18 crc kubenswrapper[4576]: I1203 09:15:18.322130 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"32ff8575-080b-4631-865b-2e9cb4d2c776","Type":"ContainerStarted","Data":"62b27011aedd5345fcfe707638045784fbba4bdb8d80ce228732918373197c34"} Dec 03 09:15:18 crc kubenswrapper[4576]: I1203 09:15:18.332025 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.332005562 podStartE2EDuration="2.332005562s" podCreationTimestamp="2025-12-03 09:15:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:15:18.329893645 +0000 UTC m=+2125.715870629" watchObservedRunningTime="2025-12-03 09:15:18.332005562 +0000 UTC m=+2125.717982546" Dec 03 09:15:18 crc kubenswrapper[4576]: E1203 09:15:18.726800 4576 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: 
, exit code -1" containerID="6ddf33c0754e54668f7bc428a6e1908109b8791fb015b0f348daa101cfa41064" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 03 09:15:18 crc kubenswrapper[4576]: E1203 09:15:18.728437 4576 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6ddf33c0754e54668f7bc428a6e1908109b8791fb015b0f348daa101cfa41064" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 03 09:15:18 crc kubenswrapper[4576]: E1203 09:15:18.739899 4576 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6ddf33c0754e54668f7bc428a6e1908109b8791fb015b0f348daa101cfa41064" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 03 09:15:18 crc kubenswrapper[4576]: E1203 09:15:18.740015 4576 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="386046da-1085-4aca-bc7d-a9960318c1cb" containerName="nova-scheduler-scheduler" Dec 03 09:15:19 crc kubenswrapper[4576]: I1203 09:15:19.316670 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 09:15:19 crc kubenswrapper[4576]: I1203 09:15:19.346471 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"10c796b7-b92d-4512-828a-59e082e97b16","Type":"ContainerStarted","Data":"bddcfbfe3a0e81201ff2923414be5f731778142cbef05e14dfd9ebc926b29788"} Dec 03 09:15:19 crc kubenswrapper[4576]: I1203 09:15:19.361378 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"32ff8575-080b-4631-865b-2e9cb4d2c776","Type":"ContainerStarted","Data":"8bbf9819979e017cd30b837c3663c822ed1fabfd0f60e564f739e120416d8034"} Dec 03 09:15:19 crc kubenswrapper[4576]: I1203 09:15:19.394456 4576 generic.go:334] "Generic (PLEG): container finished" podID="0aba50e0-b5ee-461b-837c-137175381621" containerID="3d85fe1b393ea56a48fdffa15c038b96bdc31cc18390b3c7bfe3332f1ed067b1" exitCode=0 Dec 03 09:15:19 crc kubenswrapper[4576]: I1203 09:15:19.394607 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 09:15:19 crc kubenswrapper[4576]: I1203 09:15:19.394608 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0aba50e0-b5ee-461b-837c-137175381621","Type":"ContainerDied","Data":"3d85fe1b393ea56a48fdffa15c038b96bdc31cc18390b3c7bfe3332f1ed067b1"} Dec 03 09:15:19 crc kubenswrapper[4576]: I1203 09:15:19.395509 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0aba50e0-b5ee-461b-837c-137175381621","Type":"ContainerDied","Data":"c408832e626701bbefa6e9b7b988816ceb8a78e9ef8d0da118978f7cef4a4469"} Dec 03 09:15:19 crc kubenswrapper[4576]: I1203 09:15:19.395550 4576 scope.go:117] "RemoveContainer" containerID="3d85fe1b393ea56a48fdffa15c038b96bdc31cc18390b3c7bfe3332f1ed067b1" Dec 03 09:15:19 crc kubenswrapper[4576]: I1203 09:15:19.423550 4576 scope.go:117] "RemoveContainer" containerID="d9d514e0ad889b431d0d22bb4adee4349d16189b109df2c897414f9a4bf26b47" Dec 03 09:15:19 crc kubenswrapper[4576]: I1203 09:15:19.459075 4576 scope.go:117] "RemoveContainer" containerID="3d85fe1b393ea56a48fdffa15c038b96bdc31cc18390b3c7bfe3332f1ed067b1" Dec 03 09:15:19 crc kubenswrapper[4576]: E1203 09:15:19.468480 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d85fe1b393ea56a48fdffa15c038b96bdc31cc18390b3c7bfe3332f1ed067b1\": container with ID starting with 3d85fe1b393ea56a48fdffa15c038b96bdc31cc18390b3c7bfe3332f1ed067b1 not found: ID does not exist" containerID="3d85fe1b393ea56a48fdffa15c038b96bdc31cc18390b3c7bfe3332f1ed067b1" Dec 03 09:15:19 crc kubenswrapper[4576]: I1203 09:15:19.468543 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d85fe1b393ea56a48fdffa15c038b96bdc31cc18390b3c7bfe3332f1ed067b1"} err="failed to get container status \"3d85fe1b393ea56a48fdffa15c038b96bdc31cc18390b3c7bfe3332f1ed067b1\": rpc error: code = NotFound desc = could not find container \"3d85fe1b393ea56a48fdffa15c038b96bdc31cc18390b3c7bfe3332f1ed067b1\": container with ID starting with 3d85fe1b393ea56a48fdffa15c038b96bdc31cc18390b3c7bfe3332f1ed067b1 not found: ID does not exist" Dec 03 09:15:19 crc kubenswrapper[4576]: I1203 09:15:19.468572 4576 scope.go:117] "RemoveContainer" containerID="d9d514e0ad889b431d0d22bb4adee4349d16189b109df2c897414f9a4bf26b47" Dec 03 09:15:19 crc kubenswrapper[4576]: I1203 09:15:19.471499 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hk2f2\" (UniqueName: \"kubernetes.io/projected/0aba50e0-b5ee-461b-837c-137175381621-kube-api-access-hk2f2\") pod \"0aba50e0-b5ee-461b-837c-137175381621\" (UID: \"0aba50e0-b5ee-461b-837c-137175381621\") " Dec 03 09:15:19 crc kubenswrapper[4576]: I1203 09:15:19.471572 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0aba50e0-b5ee-461b-837c-137175381621-config-data\") pod \"0aba50e0-b5ee-461b-837c-137175381621\" (UID: \"0aba50e0-b5ee-461b-837c-137175381621\") " Dec 03 09:15:19 crc kubenswrapper[4576]: I1203 09:15:19.471628 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0aba50e0-b5ee-461b-837c-137175381621-combined-ca-bundle\") pod \"0aba50e0-b5ee-461b-837c-137175381621\" (UID: \"0aba50e0-b5ee-461b-837c-137175381621\") " Dec 03 09:15:19 crc kubenswrapper[4576]: I1203 09:15:19.471731 4576 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0aba50e0-b5ee-461b-837c-137175381621-logs\") pod \"0aba50e0-b5ee-461b-837c-137175381621\" (UID: \"0aba50e0-b5ee-461b-837c-137175381621\") " Dec 03 09:15:19 crc kubenswrapper[4576]: I1203 09:15:19.472331 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0aba50e0-b5ee-461b-837c-137175381621-logs" (OuterVolumeSpecName: "logs") pod "0aba50e0-b5ee-461b-837c-137175381621" (UID: "0aba50e0-b5ee-461b-837c-137175381621"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:15:19 crc kubenswrapper[4576]: E1203 09:15:19.485008 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d9d514e0ad889b431d0d22bb4adee4349d16189b109df2c897414f9a4bf26b47\": container with ID starting with d9d514e0ad889b431d0d22bb4adee4349d16189b109df2c897414f9a4bf26b47 not found: ID does not exist" containerID="d9d514e0ad889b431d0d22bb4adee4349d16189b109df2c897414f9a4bf26b47" Dec 03 09:15:19 crc kubenswrapper[4576]: I1203 09:15:19.485058 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d9d514e0ad889b431d0d22bb4adee4349d16189b109df2c897414f9a4bf26b47"} err="failed to get container status \"d9d514e0ad889b431d0d22bb4adee4349d16189b109df2c897414f9a4bf26b47\": rpc error: code = NotFound desc = could not find container \"d9d514e0ad889b431d0d22bb4adee4349d16189b109df2c897414f9a4bf26b47\": container with ID starting with d9d514e0ad889b431d0d22bb4adee4349d16189b109df2c897414f9a4bf26b47 not found: ID does not exist" Dec 03 09:15:19 crc kubenswrapper[4576]: I1203 09:15:19.493772 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0aba50e0-b5ee-461b-837c-137175381621-kube-api-access-hk2f2" (OuterVolumeSpecName: "kube-api-access-hk2f2") pod "0aba50e0-b5ee-461b-837c-137175381621" (UID: "0aba50e0-b5ee-461b-837c-137175381621"). InnerVolumeSpecName "kube-api-access-hk2f2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:15:19 crc kubenswrapper[4576]: I1203 09:15:19.516157 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0aba50e0-b5ee-461b-837c-137175381621-config-data" (OuterVolumeSpecName: "config-data") pod "0aba50e0-b5ee-461b-837c-137175381621" (UID: "0aba50e0-b5ee-461b-837c-137175381621"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:15:19 crc kubenswrapper[4576]: I1203 09:15:19.529675 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0aba50e0-b5ee-461b-837c-137175381621-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0aba50e0-b5ee-461b-837c-137175381621" (UID: "0aba50e0-b5ee-461b-837c-137175381621"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:15:19 crc kubenswrapper[4576]: I1203 09:15:19.576084 4576 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0aba50e0-b5ee-461b-837c-137175381621-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:19 crc kubenswrapper[4576]: I1203 09:15:19.576117 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hk2f2\" (UniqueName: \"kubernetes.io/projected/0aba50e0-b5ee-461b-837c-137175381621-kube-api-access-hk2f2\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:19 crc kubenswrapper[4576]: I1203 09:15:19.576127 4576 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0aba50e0-b5ee-461b-837c-137175381621-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:19 crc kubenswrapper[4576]: I1203 09:15:19.576136 4576 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0aba50e0-b5ee-461b-837c-137175381621-logs\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:19 crc kubenswrapper[4576]: I1203 09:15:19.627349 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 03 09:15:19 crc kubenswrapper[4576]: I1203 09:15:19.652091 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.652072 podStartE2EDuration="2.652072s" podCreationTimestamp="2025-12-03 09:15:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:15:19.378651768 +0000 UTC m=+2126.764628752" watchObservedRunningTime="2025-12-03 09:15:19.652072 +0000 UTC m=+2127.038048984" Dec 03 09:15:19 crc kubenswrapper[4576]: I1203 09:15:19.740128 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 03 09:15:19 crc kubenswrapper[4576]: I1203 09:15:19.747780 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 03 09:15:19 crc kubenswrapper[4576]: I1203 09:15:19.790254 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 03 09:15:19 crc kubenswrapper[4576]: E1203 09:15:19.790655 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0aba50e0-b5ee-461b-837c-137175381621" containerName="nova-api-api" Dec 03 09:15:19 crc kubenswrapper[4576]: I1203 09:15:19.790672 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="0aba50e0-b5ee-461b-837c-137175381621" containerName="nova-api-api" Dec 03 09:15:19 crc kubenswrapper[4576]: E1203 09:15:19.790719 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0aba50e0-b5ee-461b-837c-137175381621" containerName="nova-api-log" Dec 03 09:15:19 crc kubenswrapper[4576]: I1203 09:15:19.790726 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="0aba50e0-b5ee-461b-837c-137175381621" containerName="nova-api-log" Dec 03 09:15:19 crc kubenswrapper[4576]: I1203 09:15:19.790940 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="0aba50e0-b5ee-461b-837c-137175381621" containerName="nova-api-api" Dec 03 09:15:19 crc kubenswrapper[4576]: I1203 09:15:19.790961 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="0aba50e0-b5ee-461b-837c-137175381621" containerName="nova-api-log" Dec 03 09:15:19 crc kubenswrapper[4576]: I1203 09:15:19.791937 4576 util.go:30] "No sandbox for pod 
can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 09:15:19 crc kubenswrapper[4576]: I1203 09:15:19.802401 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 09:15:19 crc kubenswrapper[4576]: I1203 09:15:19.807208 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 03 09:15:19 crc kubenswrapper[4576]: I1203 09:15:19.897016 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7664b50e-8727-4498-891e-1950d78221f4-config-data\") pod \"nova-api-0\" (UID: \"7664b50e-8727-4498-891e-1950d78221f4\") " pod="openstack/nova-api-0" Dec 03 09:15:19 crc kubenswrapper[4576]: I1203 09:15:19.897081 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7664b50e-8727-4498-891e-1950d78221f4-logs\") pod \"nova-api-0\" (UID: \"7664b50e-8727-4498-891e-1950d78221f4\") " pod="openstack/nova-api-0" Dec 03 09:15:19 crc kubenswrapper[4576]: I1203 09:15:19.897117 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vg9w9\" (UniqueName: \"kubernetes.io/projected/7664b50e-8727-4498-891e-1950d78221f4-kube-api-access-vg9w9\") pod \"nova-api-0\" (UID: \"7664b50e-8727-4498-891e-1950d78221f4\") " pod="openstack/nova-api-0" Dec 03 09:15:19 crc kubenswrapper[4576]: I1203 09:15:19.897168 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7664b50e-8727-4498-891e-1950d78221f4-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7664b50e-8727-4498-891e-1950d78221f4\") " pod="openstack/nova-api-0" Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.003541 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7664b50e-8727-4498-891e-1950d78221f4-logs\") pod \"nova-api-0\" (UID: \"7664b50e-8727-4498-891e-1950d78221f4\") " pod="openstack/nova-api-0" Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.003916 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vg9w9\" (UniqueName: \"kubernetes.io/projected/7664b50e-8727-4498-891e-1950d78221f4-kube-api-access-vg9w9\") pod \"nova-api-0\" (UID: \"7664b50e-8727-4498-891e-1950d78221f4\") " pod="openstack/nova-api-0" Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.004002 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7664b50e-8727-4498-891e-1950d78221f4-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7664b50e-8727-4498-891e-1950d78221f4\") " pod="openstack/nova-api-0" Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.004148 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7664b50e-8727-4498-891e-1950d78221f4-config-data\") pod \"nova-api-0\" (UID: \"7664b50e-8727-4498-891e-1950d78221f4\") " pod="openstack/nova-api-0" Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.004253 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7664b50e-8727-4498-891e-1950d78221f4-logs\") pod \"nova-api-0\" (UID: \"7664b50e-8727-4498-891e-1950d78221f4\") " 
pod="openstack/nova-api-0" Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.013112 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7664b50e-8727-4498-891e-1950d78221f4-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7664b50e-8727-4498-891e-1950d78221f4\") " pod="openstack/nova-api-0" Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.018880 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7664b50e-8727-4498-891e-1950d78221f4-config-data\") pod \"nova-api-0\" (UID: \"7664b50e-8727-4498-891e-1950d78221f4\") " pod="openstack/nova-api-0" Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.022366 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vg9w9\" (UniqueName: \"kubernetes.io/projected/7664b50e-8727-4498-891e-1950d78221f4-kube-api-access-vg9w9\") pod \"nova-api-0\" (UID: \"7664b50e-8727-4498-891e-1950d78221f4\") " pod="openstack/nova-api-0" Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.144472 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.169951 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.309140 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/386046da-1085-4aca-bc7d-a9960318c1cb-combined-ca-bundle\") pod \"386046da-1085-4aca-bc7d-a9960318c1cb\" (UID: \"386046da-1085-4aca-bc7d-a9960318c1cb\") " Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.309194 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9vptw\" (UniqueName: \"kubernetes.io/projected/386046da-1085-4aca-bc7d-a9960318c1cb-kube-api-access-9vptw\") pod \"386046da-1085-4aca-bc7d-a9960318c1cb\" (UID: \"386046da-1085-4aca-bc7d-a9960318c1cb\") " Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.309261 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/386046da-1085-4aca-bc7d-a9960318c1cb-config-data\") pod \"386046da-1085-4aca-bc7d-a9960318c1cb\" (UID: \"386046da-1085-4aca-bc7d-a9960318c1cb\") " Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.313175 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/386046da-1085-4aca-bc7d-a9960318c1cb-kube-api-access-9vptw" (OuterVolumeSpecName: "kube-api-access-9vptw") pod "386046da-1085-4aca-bc7d-a9960318c1cb" (UID: "386046da-1085-4aca-bc7d-a9960318c1cb"). InnerVolumeSpecName "kube-api-access-9vptw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.348640 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/386046da-1085-4aca-bc7d-a9960318c1cb-config-data" (OuterVolumeSpecName: "config-data") pod "386046da-1085-4aca-bc7d-a9960318c1cb" (UID: "386046da-1085-4aca-bc7d-a9960318c1cb"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.369360 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/386046da-1085-4aca-bc7d-a9960318c1cb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "386046da-1085-4aca-bc7d-a9960318c1cb" (UID: "386046da-1085-4aca-bc7d-a9960318c1cb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.417792 4576 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/386046da-1085-4aca-bc7d-a9960318c1cb-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.417825 4576 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/386046da-1085-4aca-bc7d-a9960318c1cb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.417836 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9vptw\" (UniqueName: \"kubernetes.io/projected/386046da-1085-4aca-bc7d-a9960318c1cb-kube-api-access-9vptw\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.429798 4576 generic.go:334] "Generic (PLEG): container finished" podID="386046da-1085-4aca-bc7d-a9960318c1cb" containerID="6ddf33c0754e54668f7bc428a6e1908109b8791fb015b0f348daa101cfa41064" exitCode=0 Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.429924 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.429936 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"386046da-1085-4aca-bc7d-a9960318c1cb","Type":"ContainerDied","Data":"6ddf33c0754e54668f7bc428a6e1908109b8791fb015b0f348daa101cfa41064"} Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.429983 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"386046da-1085-4aca-bc7d-a9960318c1cb","Type":"ContainerDied","Data":"8273f6fb07401c3bbdb72ad039ce12aa6bf1c0f2807c67759139273f5472cfec"} Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.430006 4576 scope.go:117] "RemoveContainer" containerID="6ddf33c0754e54668f7bc428a6e1908109b8791fb015b0f348daa101cfa41064" Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.446588 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"32ff8575-080b-4631-865b-2e9cb4d2c776","Type":"ContainerStarted","Data":"7d6a6a56febe2eee35f223a3b2e341c0fd24c9a2572eec3603a82572dc03632e"} Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.446661 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"32ff8575-080b-4631-865b-2e9cb4d2c776","Type":"ContainerStarted","Data":"c0217be089f06393f67eaa770e7ccc9d88507aa7564214c5f0f974c5c7b6f626"} Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.509145 4576 scope.go:117] "RemoveContainer" containerID="6ddf33c0754e54668f7bc428a6e1908109b8791fb015b0f348daa101cfa41064" Dec 03 09:15:20 crc kubenswrapper[4576]: E1203 09:15:20.509824 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"6ddf33c0754e54668f7bc428a6e1908109b8791fb015b0f348daa101cfa41064\": container with ID starting with 6ddf33c0754e54668f7bc428a6e1908109b8791fb015b0f348daa101cfa41064 not found: ID does not exist" containerID="6ddf33c0754e54668f7bc428a6e1908109b8791fb015b0f348daa101cfa41064" Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.509938 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ddf33c0754e54668f7bc428a6e1908109b8791fb015b0f348daa101cfa41064"} err="failed to get container status \"6ddf33c0754e54668f7bc428a6e1908109b8791fb015b0f348daa101cfa41064\": rpc error: code = NotFound desc = could not find container \"6ddf33c0754e54668f7bc428a6e1908109b8791fb015b0f348daa101cfa41064\": container with ID starting with 6ddf33c0754e54668f7bc428a6e1908109b8791fb015b0f348daa101cfa41064 not found: ID does not exist" Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.517378 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.529707 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.538568 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 09:15:20 crc kubenswrapper[4576]: E1203 09:15:20.539051 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="386046da-1085-4aca-bc7d-a9960318c1cb" containerName="nova-scheduler-scheduler" Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.539064 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="386046da-1085-4aca-bc7d-a9960318c1cb" containerName="nova-scheduler-scheduler" Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.539239 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="386046da-1085-4aca-bc7d-a9960318c1cb" containerName="nova-scheduler-scheduler" Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.539890 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.541733 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.549594 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.715188 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.723356 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59e9e04f-48ad-432a-bef6-16751a2eaf90-config-data\") pod \"nova-scheduler-0\" (UID: \"59e9e04f-48ad-432a-bef6-16751a2eaf90\") " pod="openstack/nova-scheduler-0" Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.723413 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7gbg2\" (UniqueName: \"kubernetes.io/projected/59e9e04f-48ad-432a-bef6-16751a2eaf90-kube-api-access-7gbg2\") pod \"nova-scheduler-0\" (UID: \"59e9e04f-48ad-432a-bef6-16751a2eaf90\") " pod="openstack/nova-scheduler-0" Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.723438 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59e9e04f-48ad-432a-bef6-16751a2eaf90-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"59e9e04f-48ad-432a-bef6-16751a2eaf90\") " pod="openstack/nova-scheduler-0" Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.829338 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59e9e04f-48ad-432a-bef6-16751a2eaf90-config-data\") pod \"nova-scheduler-0\" (UID: \"59e9e04f-48ad-432a-bef6-16751a2eaf90\") " pod="openstack/nova-scheduler-0" Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.829723 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7gbg2\" (UniqueName: \"kubernetes.io/projected/59e9e04f-48ad-432a-bef6-16751a2eaf90-kube-api-access-7gbg2\") pod \"nova-scheduler-0\" (UID: \"59e9e04f-48ad-432a-bef6-16751a2eaf90\") " pod="openstack/nova-scheduler-0" Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.829756 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59e9e04f-48ad-432a-bef6-16751a2eaf90-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"59e9e04f-48ad-432a-bef6-16751a2eaf90\") " pod="openstack/nova-scheduler-0" Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.844241 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59e9e04f-48ad-432a-bef6-16751a2eaf90-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"59e9e04f-48ad-432a-bef6-16751a2eaf90\") " pod="openstack/nova-scheduler-0" Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.847692 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7gbg2\" (UniqueName: \"kubernetes.io/projected/59e9e04f-48ad-432a-bef6-16751a2eaf90-kube-api-access-7gbg2\") pod \"nova-scheduler-0\" (UID: \"59e9e04f-48ad-432a-bef6-16751a2eaf90\") " pod="openstack/nova-scheduler-0" Dec 03 09:15:20 crc 
kubenswrapper[4576]: I1203 09:15:20.863749 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59e9e04f-48ad-432a-bef6-16751a2eaf90-config-data\") pod \"nova-scheduler-0\" (UID: \"59e9e04f-48ad-432a-bef6-16751a2eaf90\") " pod="openstack/nova-scheduler-0" Dec 03 09:15:20 crc kubenswrapper[4576]: I1203 09:15:20.864176 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 09:15:21 crc kubenswrapper[4576]: W1203 09:15:21.365586 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod59e9e04f_48ad_432a_bef6_16751a2eaf90.slice/crio-4de2775d640469d6b6f4eacd2e55a347fa820c2bd960c92b643e97681fd662cf WatchSource:0}: Error finding container 4de2775d640469d6b6f4eacd2e55a347fa820c2bd960c92b643e97681fd662cf: Status 404 returned error can't find the container with id 4de2775d640469d6b6f4eacd2e55a347fa820c2bd960c92b643e97681fd662cf Dec 03 09:15:21 crc kubenswrapper[4576]: I1203 09:15:21.377175 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 09:15:21 crc kubenswrapper[4576]: I1203 09:15:21.481323 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7664b50e-8727-4498-891e-1950d78221f4","Type":"ContainerStarted","Data":"d614b432afa11510b2c5cdffe4335ce969d869c5b263970eca36797c1737ce3a"} Dec 03 09:15:21 crc kubenswrapper[4576]: I1203 09:15:21.481649 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7664b50e-8727-4498-891e-1950d78221f4","Type":"ContainerStarted","Data":"aa110946c1d0e2ade0e4f419daa1af85fc426c5494eb20db53b0991b5835a0f6"} Dec 03 09:15:21 crc kubenswrapper[4576]: I1203 09:15:21.481660 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7664b50e-8727-4498-891e-1950d78221f4","Type":"ContainerStarted","Data":"1cf30f7044356cd32c077aaded1fa8ed32d09d7673003d0b358c0deb80455b5e"} Dec 03 09:15:21 crc kubenswrapper[4576]: I1203 09:15:21.493042 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"59e9e04f-48ad-432a-bef6-16751a2eaf90","Type":"ContainerStarted","Data":"4de2775d640469d6b6f4eacd2e55a347fa820c2bd960c92b643e97681fd662cf"} Dec 03 09:15:21 crc kubenswrapper[4576]: I1203 09:15:21.503175 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.50316041 podStartE2EDuration="2.50316041s" podCreationTimestamp="2025-12-03 09:15:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:15:21.501475994 +0000 UTC m=+2128.887452978" watchObservedRunningTime="2025-12-03 09:15:21.50316041 +0000 UTC m=+2128.889137394" Dec 03 09:15:21 crc kubenswrapper[4576]: I1203 09:15:21.690575 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0aba50e0-b5ee-461b-837c-137175381621" path="/var/lib/kubelet/pods/0aba50e0-b5ee-461b-837c-137175381621/volumes" Dec 03 09:15:21 crc kubenswrapper[4576]: I1203 09:15:21.692585 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="386046da-1085-4aca-bc7d-a9960318c1cb" path="/var/lib/kubelet/pods/386046da-1085-4aca-bc7d-a9960318c1cb/volumes" Dec 03 09:15:22 crc kubenswrapper[4576]: I1203 09:15:22.534497 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-scheduler-0" event={"ID":"59e9e04f-48ad-432a-bef6-16751a2eaf90","Type":"ContainerStarted","Data":"544f1a202da33a0848881e8d66e3794a51977a210446fc41d52de7bd059d711a"} Dec 03 09:15:22 crc kubenswrapper[4576]: I1203 09:15:22.537266 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"32ff8575-080b-4631-865b-2e9cb4d2c776","Type":"ContainerStarted","Data":"ba1519cfb9fc007aeeef84dfc11059551e1ebaae6f527698bfb5ed09b9dc863f"} Dec 03 09:15:22 crc kubenswrapper[4576]: I1203 09:15:22.558034 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.55801471 podStartE2EDuration="2.55801471s" podCreationTimestamp="2025-12-03 09:15:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:15:22.556714504 +0000 UTC m=+2129.942691488" watchObservedRunningTime="2025-12-03 09:15:22.55801471 +0000 UTC m=+2129.943991694" Dec 03 09:15:22 crc kubenswrapper[4576]: I1203 09:15:22.588000 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.667764056 podStartE2EDuration="6.587981597s" podCreationTimestamp="2025-12-03 09:15:16 +0000 UTC" firstStartedPulling="2025-12-03 09:15:17.740623633 +0000 UTC m=+2125.126600617" lastFinishedPulling="2025-12-03 09:15:21.660841174 +0000 UTC m=+2129.046818158" observedRunningTime="2025-12-03 09:15:22.580446142 +0000 UTC m=+2129.966423126" watchObservedRunningTime="2025-12-03 09:15:22.587981597 +0000 UTC m=+2129.973958581" Dec 03 09:15:22 crc kubenswrapper[4576]: I1203 09:15:22.785988 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 03 09:15:22 crc kubenswrapper[4576]: I1203 09:15:22.786042 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 03 09:15:23 crc kubenswrapper[4576]: I1203 09:15:23.546823 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 09:15:25 crc kubenswrapper[4576]: I1203 09:15:25.865349 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 03 09:15:26 crc kubenswrapper[4576]: I1203 09:15:26.725290 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Dec 03 09:15:27 crc kubenswrapper[4576]: I1203 09:15:27.785896 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 03 09:15:27 crc kubenswrapper[4576]: I1203 09:15:27.785938 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 03 09:15:28 crc kubenswrapper[4576]: I1203 09:15:28.801767 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="10c796b7-b92d-4512-828a-59e082e97b16" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.197:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 09:15:28 crc kubenswrapper[4576]: I1203 09:15:28.801767 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="10c796b7-b92d-4512-828a-59e082e97b16" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.197:8775/\": net/http: request canceled (Client.Timeout exceeded while 
awaiting headers)" Dec 03 09:15:29 crc kubenswrapper[4576]: I1203 09:15:29.103314 4576 scope.go:117] "RemoveContainer" containerID="7427416535c820c63b653125794914811da6781790bd7deb7d82db900b897430" Dec 03 09:15:30 crc kubenswrapper[4576]: I1203 09:15:30.170819 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 09:15:30 crc kubenswrapper[4576]: I1203 09:15:30.171018 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 09:15:30 crc kubenswrapper[4576]: I1203 09:15:30.864766 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 03 09:15:30 crc kubenswrapper[4576]: I1203 09:15:30.898610 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 03 09:15:31 crc kubenswrapper[4576]: I1203 09:15:31.253262 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="7664b50e-8727-4498-891e-1950d78221f4" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.198:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 09:15:31 crc kubenswrapper[4576]: I1203 09:15:31.253435 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="7664b50e-8727-4498-891e-1950d78221f4" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.198:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 09:15:31 crc kubenswrapper[4576]: I1203 09:15:31.690128 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 03 09:15:37 crc kubenswrapper[4576]: I1203 09:15:37.456388 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 03 09:15:37 crc kubenswrapper[4576]: I1203 09:15:37.626561 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d49b775-1c3c-430a-9d73-744e4186631f-combined-ca-bundle\") pod \"8d49b775-1c3c-430a-9d73-744e4186631f\" (UID: \"8d49b775-1c3c-430a-9d73-744e4186631f\") " Dec 03 09:15:37 crc kubenswrapper[4576]: I1203 09:15:37.626625 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d49b775-1c3c-430a-9d73-744e4186631f-config-data\") pod \"8d49b775-1c3c-430a-9d73-744e4186631f\" (UID: \"8d49b775-1c3c-430a-9d73-744e4186631f\") " Dec 03 09:15:37 crc kubenswrapper[4576]: I1203 09:15:37.626751 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fm5sx\" (UniqueName: \"kubernetes.io/projected/8d49b775-1c3c-430a-9d73-744e4186631f-kube-api-access-fm5sx\") pod \"8d49b775-1c3c-430a-9d73-744e4186631f\" (UID: \"8d49b775-1c3c-430a-9d73-744e4186631f\") " Dec 03 09:15:37 crc kubenswrapper[4576]: I1203 09:15:37.635844 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d49b775-1c3c-430a-9d73-744e4186631f-kube-api-access-fm5sx" (OuterVolumeSpecName: "kube-api-access-fm5sx") pod "8d49b775-1c3c-430a-9d73-744e4186631f" (UID: "8d49b775-1c3c-430a-9d73-744e4186631f"). InnerVolumeSpecName "kube-api-access-fm5sx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:15:37 crc kubenswrapper[4576]: I1203 09:15:37.655395 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d49b775-1c3c-430a-9d73-744e4186631f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8d49b775-1c3c-430a-9d73-744e4186631f" (UID: "8d49b775-1c3c-430a-9d73-744e4186631f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:15:37 crc kubenswrapper[4576]: I1203 09:15:37.659405 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d49b775-1c3c-430a-9d73-744e4186631f-config-data" (OuterVolumeSpecName: "config-data") pod "8d49b775-1c3c-430a-9d73-744e4186631f" (UID: "8d49b775-1c3c-430a-9d73-744e4186631f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:15:37 crc kubenswrapper[4576]: I1203 09:15:37.708089 4576 generic.go:334] "Generic (PLEG): container finished" podID="8d49b775-1c3c-430a-9d73-744e4186631f" containerID="a37ad400beaa2fbabddb7cb25f0c549e11a78967e5d91b73fdf808c94b2a1149" exitCode=137 Dec 03 09:15:37 crc kubenswrapper[4576]: I1203 09:15:37.708138 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 03 09:15:37 crc kubenswrapper[4576]: I1203 09:15:37.708145 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"8d49b775-1c3c-430a-9d73-744e4186631f","Type":"ContainerDied","Data":"a37ad400beaa2fbabddb7cb25f0c549e11a78967e5d91b73fdf808c94b2a1149"} Dec 03 09:15:37 crc kubenswrapper[4576]: I1203 09:15:37.708480 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"8d49b775-1c3c-430a-9d73-744e4186631f","Type":"ContainerDied","Data":"1a73784ae7f0fc5001ad713467deb054ba6bdda4228df130121336ab35a7204c"} Dec 03 09:15:37 crc kubenswrapper[4576]: I1203 09:15:37.708505 4576 scope.go:117] "RemoveContainer" containerID="a37ad400beaa2fbabddb7cb25f0c549e11a78967e5d91b73fdf808c94b2a1149" Dec 03 09:15:37 crc kubenswrapper[4576]: I1203 09:15:37.729058 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fm5sx\" (UniqueName: \"kubernetes.io/projected/8d49b775-1c3c-430a-9d73-744e4186631f-kube-api-access-fm5sx\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:37 crc kubenswrapper[4576]: I1203 09:15:37.729101 4576 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d49b775-1c3c-430a-9d73-744e4186631f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:37 crc kubenswrapper[4576]: I1203 09:15:37.729116 4576 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d49b775-1c3c-430a-9d73-744e4186631f-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:37 crc kubenswrapper[4576]: I1203 09:15:37.750309 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 09:15:37 crc kubenswrapper[4576]: I1203 09:15:37.755731 4576 scope.go:117] "RemoveContainer" containerID="a37ad400beaa2fbabddb7cb25f0c549e11a78967e5d91b73fdf808c94b2a1149" Dec 03 09:15:37 crc kubenswrapper[4576]: E1203 09:15:37.756100 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"a37ad400beaa2fbabddb7cb25f0c549e11a78967e5d91b73fdf808c94b2a1149\": container with ID starting with a37ad400beaa2fbabddb7cb25f0c549e11a78967e5d91b73fdf808c94b2a1149 not found: ID does not exist" containerID="a37ad400beaa2fbabddb7cb25f0c549e11a78967e5d91b73fdf808c94b2a1149" Dec 03 09:15:37 crc kubenswrapper[4576]: I1203 09:15:37.756130 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a37ad400beaa2fbabddb7cb25f0c549e11a78967e5d91b73fdf808c94b2a1149"} err="failed to get container status \"a37ad400beaa2fbabddb7cb25f0c549e11a78967e5d91b73fdf808c94b2a1149\": rpc error: code = NotFound desc = could not find container \"a37ad400beaa2fbabddb7cb25f0c549e11a78967e5d91b73fdf808c94b2a1149\": container with ID starting with a37ad400beaa2fbabddb7cb25f0c549e11a78967e5d91b73fdf808c94b2a1149 not found: ID does not exist" Dec 03 09:15:37 crc kubenswrapper[4576]: I1203 09:15:37.764493 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 09:15:37 crc kubenswrapper[4576]: I1203 09:15:37.775834 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 09:15:37 crc kubenswrapper[4576]: E1203 09:15:37.776247 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d49b775-1c3c-430a-9d73-744e4186631f" containerName="nova-cell1-novncproxy-novncproxy" Dec 03 09:15:37 crc kubenswrapper[4576]: I1203 09:15:37.776264 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d49b775-1c3c-430a-9d73-744e4186631f" containerName="nova-cell1-novncproxy-novncproxy" Dec 03 09:15:37 crc kubenswrapper[4576]: I1203 09:15:37.776444 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d49b775-1c3c-430a-9d73-744e4186631f" containerName="nova-cell1-novncproxy-novncproxy" Dec 03 09:15:37 crc kubenswrapper[4576]: I1203 09:15:37.777073 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 03 09:15:37 crc kubenswrapper[4576]: I1203 09:15:37.778808 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 03 09:15:37 crc kubenswrapper[4576]: I1203 09:15:37.779756 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Dec 03 09:15:37 crc kubenswrapper[4576]: I1203 09:15:37.779973 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Dec 03 09:15:37 crc kubenswrapper[4576]: I1203 09:15:37.788043 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 09:15:37 crc kubenswrapper[4576]: I1203 09:15:37.808867 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 03 09:15:37 crc kubenswrapper[4576]: I1203 09:15:37.809264 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 03 09:15:37 crc kubenswrapper[4576]: I1203 09:15:37.816594 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 03 09:15:37 crc kubenswrapper[4576]: I1203 09:15:37.935988 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69cd46ae-0738-4d84-87a3-077751519dc4-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"69cd46ae-0738-4d84-87a3-077751519dc4\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 09:15:37 crc kubenswrapper[4576]: I1203 09:15:37.936262 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/69cd46ae-0738-4d84-87a3-077751519dc4-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"69cd46ae-0738-4d84-87a3-077751519dc4\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 09:15:37 crc kubenswrapper[4576]: I1203 09:15:37.936360 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d4g6c\" (UniqueName: \"kubernetes.io/projected/69cd46ae-0738-4d84-87a3-077751519dc4-kube-api-access-d4g6c\") pod \"nova-cell1-novncproxy-0\" (UID: \"69cd46ae-0738-4d84-87a3-077751519dc4\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 09:15:37 crc kubenswrapper[4576]: I1203 09:15:37.936640 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69cd46ae-0738-4d84-87a3-077751519dc4-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"69cd46ae-0738-4d84-87a3-077751519dc4\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 09:15:37 crc kubenswrapper[4576]: I1203 09:15:37.936742 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/69cd46ae-0738-4d84-87a3-077751519dc4-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"69cd46ae-0738-4d84-87a3-077751519dc4\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 09:15:38 crc kubenswrapper[4576]: I1203 09:15:38.039171 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69cd46ae-0738-4d84-87a3-077751519dc4-config-data\") pod 
\"nova-cell1-novncproxy-0\" (UID: \"69cd46ae-0738-4d84-87a3-077751519dc4\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 09:15:38 crc kubenswrapper[4576]: I1203 09:15:38.039227 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/69cd46ae-0738-4d84-87a3-077751519dc4-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"69cd46ae-0738-4d84-87a3-077751519dc4\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 09:15:38 crc kubenswrapper[4576]: I1203 09:15:38.039290 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d4g6c\" (UniqueName: \"kubernetes.io/projected/69cd46ae-0738-4d84-87a3-077751519dc4-kube-api-access-d4g6c\") pod \"nova-cell1-novncproxy-0\" (UID: \"69cd46ae-0738-4d84-87a3-077751519dc4\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 09:15:38 crc kubenswrapper[4576]: I1203 09:15:38.039462 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69cd46ae-0738-4d84-87a3-077751519dc4-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"69cd46ae-0738-4d84-87a3-077751519dc4\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 09:15:38 crc kubenswrapper[4576]: I1203 09:15:38.039502 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/69cd46ae-0738-4d84-87a3-077751519dc4-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"69cd46ae-0738-4d84-87a3-077751519dc4\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 09:15:38 crc kubenswrapper[4576]: I1203 09:15:38.043903 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69cd46ae-0738-4d84-87a3-077751519dc4-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"69cd46ae-0738-4d84-87a3-077751519dc4\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 09:15:38 crc kubenswrapper[4576]: I1203 09:15:38.044199 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69cd46ae-0738-4d84-87a3-077751519dc4-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"69cd46ae-0738-4d84-87a3-077751519dc4\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 09:15:38 crc kubenswrapper[4576]: I1203 09:15:38.044247 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/69cd46ae-0738-4d84-87a3-077751519dc4-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"69cd46ae-0738-4d84-87a3-077751519dc4\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 09:15:38 crc kubenswrapper[4576]: I1203 09:15:38.046089 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/69cd46ae-0738-4d84-87a3-077751519dc4-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"69cd46ae-0738-4d84-87a3-077751519dc4\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 09:15:38 crc kubenswrapper[4576]: I1203 09:15:38.055722 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d4g6c\" (UniqueName: \"kubernetes.io/projected/69cd46ae-0738-4d84-87a3-077751519dc4-kube-api-access-d4g6c\") pod \"nova-cell1-novncproxy-0\" (UID: \"69cd46ae-0738-4d84-87a3-077751519dc4\") " 
pod="openstack/nova-cell1-novncproxy-0" Dec 03 09:15:38 crc kubenswrapper[4576]: I1203 09:15:38.106104 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 03 09:15:38 crc kubenswrapper[4576]: I1203 09:15:38.559909 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 09:15:38 crc kubenswrapper[4576]: I1203 09:15:38.718801 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"69cd46ae-0738-4d84-87a3-077751519dc4","Type":"ContainerStarted","Data":"abc0e7e7fb433864203e57390e5856a90648e92ca61e5ce94c1de4c216022ac1"} Dec 03 09:15:38 crc kubenswrapper[4576]: I1203 09:15:38.726279 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 03 09:15:39 crc kubenswrapper[4576]: I1203 09:15:39.690444 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d49b775-1c3c-430a-9d73-744e4186631f" path="/var/lib/kubelet/pods/8d49b775-1c3c-430a-9d73-744e4186631f/volumes" Dec 03 09:15:39 crc kubenswrapper[4576]: I1203 09:15:39.730817 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"69cd46ae-0738-4d84-87a3-077751519dc4","Type":"ContainerStarted","Data":"3c9d3c47a2c3ca626a76734f0fb824be91721b67e068f38559e6324e30c5263d"} Dec 03 09:15:39 crc kubenswrapper[4576]: I1203 09:15:39.756573 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.756549054 podStartE2EDuration="2.756549054s" podCreationTimestamp="2025-12-03 09:15:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:15:39.748853374 +0000 UTC m=+2147.134830358" watchObservedRunningTime="2025-12-03 09:15:39.756549054 +0000 UTC m=+2147.142526038" Dec 03 09:15:40 crc kubenswrapper[4576]: I1203 09:15:40.174871 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 03 09:15:40 crc kubenswrapper[4576]: I1203 09:15:40.175464 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 03 09:15:40 crc kubenswrapper[4576]: I1203 09:15:40.176662 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 03 09:15:40 crc kubenswrapper[4576]: I1203 09:15:40.180150 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 03 09:15:40 crc kubenswrapper[4576]: I1203 09:15:40.741767 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 03 09:15:40 crc kubenswrapper[4576]: I1203 09:15:40.745944 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 03 09:15:40 crc kubenswrapper[4576]: I1203 09:15:40.957159 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-zxdsl"] Dec 03 09:15:40 crc kubenswrapper[4576]: I1203 09:15:40.958947 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-cd5cbd7b9-zxdsl" Dec 03 09:15:40 crc kubenswrapper[4576]: I1203 09:15:40.992811 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-zxdsl"] Dec 03 09:15:41 crc kubenswrapper[4576]: I1203 09:15:41.105576 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/031dd78c-4b11-401e-a442-f6824ded6e7d-ovsdbserver-nb\") pod \"dnsmasq-dns-cd5cbd7b9-zxdsl\" (UID: \"031dd78c-4b11-401e-a442-f6824ded6e7d\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-zxdsl" Dec 03 09:15:41 crc kubenswrapper[4576]: I1203 09:15:41.105642 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/031dd78c-4b11-401e-a442-f6824ded6e7d-ovsdbserver-sb\") pod \"dnsmasq-dns-cd5cbd7b9-zxdsl\" (UID: \"031dd78c-4b11-401e-a442-f6824ded6e7d\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-zxdsl" Dec 03 09:15:41 crc kubenswrapper[4576]: I1203 09:15:41.105718 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/031dd78c-4b11-401e-a442-f6824ded6e7d-config\") pod \"dnsmasq-dns-cd5cbd7b9-zxdsl\" (UID: \"031dd78c-4b11-401e-a442-f6824ded6e7d\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-zxdsl" Dec 03 09:15:41 crc kubenswrapper[4576]: I1203 09:15:41.105762 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fltnf\" (UniqueName: \"kubernetes.io/projected/031dd78c-4b11-401e-a442-f6824ded6e7d-kube-api-access-fltnf\") pod \"dnsmasq-dns-cd5cbd7b9-zxdsl\" (UID: \"031dd78c-4b11-401e-a442-f6824ded6e7d\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-zxdsl" Dec 03 09:15:41 crc kubenswrapper[4576]: I1203 09:15:41.105790 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/031dd78c-4b11-401e-a442-f6824ded6e7d-dns-swift-storage-0\") pod \"dnsmasq-dns-cd5cbd7b9-zxdsl\" (UID: \"031dd78c-4b11-401e-a442-f6824ded6e7d\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-zxdsl" Dec 03 09:15:41 crc kubenswrapper[4576]: I1203 09:15:41.105823 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/031dd78c-4b11-401e-a442-f6824ded6e7d-dns-svc\") pod \"dnsmasq-dns-cd5cbd7b9-zxdsl\" (UID: \"031dd78c-4b11-401e-a442-f6824ded6e7d\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-zxdsl" Dec 03 09:15:41 crc kubenswrapper[4576]: I1203 09:15:41.207510 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/031dd78c-4b11-401e-a442-f6824ded6e7d-config\") pod \"dnsmasq-dns-cd5cbd7b9-zxdsl\" (UID: \"031dd78c-4b11-401e-a442-f6824ded6e7d\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-zxdsl" Dec 03 09:15:41 crc kubenswrapper[4576]: I1203 09:15:41.207590 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fltnf\" (UniqueName: \"kubernetes.io/projected/031dd78c-4b11-401e-a442-f6824ded6e7d-kube-api-access-fltnf\") pod \"dnsmasq-dns-cd5cbd7b9-zxdsl\" (UID: \"031dd78c-4b11-401e-a442-f6824ded6e7d\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-zxdsl" Dec 03 09:15:41 crc kubenswrapper[4576]: I1203 09:15:41.207625 4576 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/031dd78c-4b11-401e-a442-f6824ded6e7d-dns-swift-storage-0\") pod \"dnsmasq-dns-cd5cbd7b9-zxdsl\" (UID: \"031dd78c-4b11-401e-a442-f6824ded6e7d\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-zxdsl" Dec 03 09:15:41 crc kubenswrapper[4576]: I1203 09:15:41.207657 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/031dd78c-4b11-401e-a442-f6824ded6e7d-dns-svc\") pod \"dnsmasq-dns-cd5cbd7b9-zxdsl\" (UID: \"031dd78c-4b11-401e-a442-f6824ded6e7d\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-zxdsl" Dec 03 09:15:41 crc kubenswrapper[4576]: I1203 09:15:41.207694 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/031dd78c-4b11-401e-a442-f6824ded6e7d-ovsdbserver-nb\") pod \"dnsmasq-dns-cd5cbd7b9-zxdsl\" (UID: \"031dd78c-4b11-401e-a442-f6824ded6e7d\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-zxdsl" Dec 03 09:15:41 crc kubenswrapper[4576]: I1203 09:15:41.207727 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/031dd78c-4b11-401e-a442-f6824ded6e7d-ovsdbserver-sb\") pod \"dnsmasq-dns-cd5cbd7b9-zxdsl\" (UID: \"031dd78c-4b11-401e-a442-f6824ded6e7d\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-zxdsl" Dec 03 09:15:41 crc kubenswrapper[4576]: I1203 09:15:41.208548 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/031dd78c-4b11-401e-a442-f6824ded6e7d-config\") pod \"dnsmasq-dns-cd5cbd7b9-zxdsl\" (UID: \"031dd78c-4b11-401e-a442-f6824ded6e7d\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-zxdsl" Dec 03 09:15:41 crc kubenswrapper[4576]: I1203 09:15:41.208602 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/031dd78c-4b11-401e-a442-f6824ded6e7d-ovsdbserver-sb\") pod \"dnsmasq-dns-cd5cbd7b9-zxdsl\" (UID: \"031dd78c-4b11-401e-a442-f6824ded6e7d\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-zxdsl" Dec 03 09:15:41 crc kubenswrapper[4576]: I1203 09:15:41.208727 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/031dd78c-4b11-401e-a442-f6824ded6e7d-dns-swift-storage-0\") pod \"dnsmasq-dns-cd5cbd7b9-zxdsl\" (UID: \"031dd78c-4b11-401e-a442-f6824ded6e7d\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-zxdsl" Dec 03 09:15:41 crc kubenswrapper[4576]: I1203 09:15:41.209222 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/031dd78c-4b11-401e-a442-f6824ded6e7d-ovsdbserver-nb\") pod \"dnsmasq-dns-cd5cbd7b9-zxdsl\" (UID: \"031dd78c-4b11-401e-a442-f6824ded6e7d\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-zxdsl" Dec 03 09:15:41 crc kubenswrapper[4576]: I1203 09:15:41.209756 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/031dd78c-4b11-401e-a442-f6824ded6e7d-dns-svc\") pod \"dnsmasq-dns-cd5cbd7b9-zxdsl\" (UID: \"031dd78c-4b11-401e-a442-f6824ded6e7d\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-zxdsl" Dec 03 09:15:41 crc kubenswrapper[4576]: I1203 09:15:41.229388 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fltnf\" (UniqueName: 
\"kubernetes.io/projected/031dd78c-4b11-401e-a442-f6824ded6e7d-kube-api-access-fltnf\") pod \"dnsmasq-dns-cd5cbd7b9-zxdsl\" (UID: \"031dd78c-4b11-401e-a442-f6824ded6e7d\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-zxdsl" Dec 03 09:15:41 crc kubenswrapper[4576]: I1203 09:15:41.283194 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cd5cbd7b9-zxdsl" Dec 03 09:15:41 crc kubenswrapper[4576]: I1203 09:15:41.924771 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-zxdsl"] Dec 03 09:15:42 crc kubenswrapper[4576]: E1203 09:15:42.362323 4576 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod031dd78c_4b11_401e_a442_f6824ded6e7d.slice/crio-conmon-9f96812c8497dd2eb80d2673835a1cb099b0e6c3904684832fa9eab124a79b5d.scope\": RecentStats: unable to find data in memory cache]" Dec 03 09:15:42 crc kubenswrapper[4576]: I1203 09:15:42.765863 4576 generic.go:334] "Generic (PLEG): container finished" podID="031dd78c-4b11-401e-a442-f6824ded6e7d" containerID="9f96812c8497dd2eb80d2673835a1cb099b0e6c3904684832fa9eab124a79b5d" exitCode=0 Dec 03 09:15:42 crc kubenswrapper[4576]: I1203 09:15:42.767845 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd5cbd7b9-zxdsl" event={"ID":"031dd78c-4b11-401e-a442-f6824ded6e7d","Type":"ContainerDied","Data":"9f96812c8497dd2eb80d2673835a1cb099b0e6c3904684832fa9eab124a79b5d"} Dec 03 09:15:42 crc kubenswrapper[4576]: I1203 09:15:42.768006 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd5cbd7b9-zxdsl" event={"ID":"031dd78c-4b11-401e-a442-f6824ded6e7d","Type":"ContainerStarted","Data":"1e417014bb984973852fd2eb9516f58914a59c0b0f0dff4d55776cd5d7376095"} Dec 03 09:15:43 crc kubenswrapper[4576]: I1203 09:15:43.106828 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 03 09:15:43 crc kubenswrapper[4576]: I1203 09:15:43.508918 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:15:43 crc kubenswrapper[4576]: I1203 09:15:43.509262 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="32ff8575-080b-4631-865b-2e9cb4d2c776" containerName="sg-core" containerID="cri-o://7d6a6a56febe2eee35f223a3b2e341c0fd24c9a2572eec3603a82572dc03632e" gracePeriod=30 Dec 03 09:15:43 crc kubenswrapper[4576]: I1203 09:15:43.509344 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="32ff8575-080b-4631-865b-2e9cb4d2c776" containerName="proxy-httpd" containerID="cri-o://ba1519cfb9fc007aeeef84dfc11059551e1ebaae6f527698bfb5ed09b9dc863f" gracePeriod=30 Dec 03 09:15:43 crc kubenswrapper[4576]: I1203 09:15:43.509405 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="32ff8575-080b-4631-865b-2e9cb4d2c776" containerName="ceilometer-central-agent" containerID="cri-o://8bbf9819979e017cd30b837c3663c822ed1fabfd0f60e564f739e120416d8034" gracePeriod=30 Dec 03 09:15:43 crc kubenswrapper[4576]: I1203 09:15:43.509384 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="32ff8575-080b-4631-865b-2e9cb4d2c776" containerName="ceilometer-notification-agent" 
containerID="cri-o://c0217be089f06393f67eaa770e7ccc9d88507aa7564214c5f0f974c5c7b6f626" gracePeriod=30 Dec 03 09:15:43 crc kubenswrapper[4576]: I1203 09:15:43.544317 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 03 09:15:43 crc kubenswrapper[4576]: I1203 09:15:43.610843 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="32ff8575-080b-4631-865b-2e9cb4d2c776" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.196:3000/\": read tcp 10.217.0.2:52726->10.217.0.196:3000: read: connection reset by peer" Dec 03 09:15:43 crc kubenswrapper[4576]: I1203 09:15:43.790345 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd5cbd7b9-zxdsl" event={"ID":"031dd78c-4b11-401e-a442-f6824ded6e7d","Type":"ContainerStarted","Data":"8f6969c402b9a2f68d4ea1b41fdfe670e48b0b8a30deabddff51eb816e63a355"} Dec 03 09:15:43 crc kubenswrapper[4576]: I1203 09:15:43.790976 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-cd5cbd7b9-zxdsl" Dec 03 09:15:43 crc kubenswrapper[4576]: I1203 09:15:43.799888 4576 generic.go:334] "Generic (PLEG): container finished" podID="32ff8575-080b-4631-865b-2e9cb4d2c776" containerID="ba1519cfb9fc007aeeef84dfc11059551e1ebaae6f527698bfb5ed09b9dc863f" exitCode=0 Dec 03 09:15:43 crc kubenswrapper[4576]: I1203 09:15:43.799962 4576 generic.go:334] "Generic (PLEG): container finished" podID="32ff8575-080b-4631-865b-2e9cb4d2c776" containerID="7d6a6a56febe2eee35f223a3b2e341c0fd24c9a2572eec3603a82572dc03632e" exitCode=2 Dec 03 09:15:43 crc kubenswrapper[4576]: I1203 09:15:43.800205 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="7664b50e-8727-4498-891e-1950d78221f4" containerName="nova-api-log" containerID="cri-o://aa110946c1d0e2ade0e4f419daa1af85fc426c5494eb20db53b0991b5835a0f6" gracePeriod=30 Dec 03 09:15:43 crc kubenswrapper[4576]: I1203 09:15:43.800506 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"32ff8575-080b-4631-865b-2e9cb4d2c776","Type":"ContainerDied","Data":"ba1519cfb9fc007aeeef84dfc11059551e1ebaae6f527698bfb5ed09b9dc863f"} Dec 03 09:15:43 crc kubenswrapper[4576]: I1203 09:15:43.800571 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"32ff8575-080b-4631-865b-2e9cb4d2c776","Type":"ContainerDied","Data":"7d6a6a56febe2eee35f223a3b2e341c0fd24c9a2572eec3603a82572dc03632e"} Dec 03 09:15:43 crc kubenswrapper[4576]: I1203 09:15:43.800639 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="7664b50e-8727-4498-891e-1950d78221f4" containerName="nova-api-api" containerID="cri-o://d614b432afa11510b2c5cdffe4335ce969d869c5b263970eca36797c1737ce3a" gracePeriod=30 Dec 03 09:15:43 crc kubenswrapper[4576]: I1203 09:15:43.820938 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-cd5cbd7b9-zxdsl" podStartSLOduration=3.82091987 podStartE2EDuration="3.82091987s" podCreationTimestamp="2025-12-03 09:15:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:15:43.812546941 +0000 UTC m=+2151.198523945" watchObservedRunningTime="2025-12-03 09:15:43.82091987 +0000 UTC m=+2151.206896854" Dec 03 09:15:44 crc kubenswrapper[4576]: I1203 09:15:44.812786 4576 generic.go:334] "Generic 
(PLEG): container finished" podID="32ff8575-080b-4631-865b-2e9cb4d2c776" containerID="8bbf9819979e017cd30b837c3663c822ed1fabfd0f60e564f739e120416d8034" exitCode=0 Dec 03 09:15:44 crc kubenswrapper[4576]: I1203 09:15:44.812883 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"32ff8575-080b-4631-865b-2e9cb4d2c776","Type":"ContainerDied","Data":"8bbf9819979e017cd30b837c3663c822ed1fabfd0f60e564f739e120416d8034"} Dec 03 09:15:44 crc kubenswrapper[4576]: I1203 09:15:44.814979 4576 generic.go:334] "Generic (PLEG): container finished" podID="7664b50e-8727-4498-891e-1950d78221f4" containerID="aa110946c1d0e2ade0e4f419daa1af85fc426c5494eb20db53b0991b5835a0f6" exitCode=143 Dec 03 09:15:44 crc kubenswrapper[4576]: I1203 09:15:44.815062 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7664b50e-8727-4498-891e-1950d78221f4","Type":"ContainerDied","Data":"aa110946c1d0e2ade0e4f419daa1af85fc426c5494eb20db53b0991b5835a0f6"} Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.474501 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.652923 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/32ff8575-080b-4631-865b-2e9cb4d2c776-ceilometer-tls-certs\") pod \"32ff8575-080b-4631-865b-2e9cb4d2c776\" (UID: \"32ff8575-080b-4631-865b-2e9cb4d2c776\") " Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.653026 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/32ff8575-080b-4631-865b-2e9cb4d2c776-log-httpd\") pod \"32ff8575-080b-4631-865b-2e9cb4d2c776\" (UID: \"32ff8575-080b-4631-865b-2e9cb4d2c776\") " Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.653076 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/32ff8575-080b-4631-865b-2e9cb4d2c776-sg-core-conf-yaml\") pod \"32ff8575-080b-4631-865b-2e9cb4d2c776\" (UID: \"32ff8575-080b-4631-865b-2e9cb4d2c776\") " Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.653121 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32ff8575-080b-4631-865b-2e9cb4d2c776-config-data\") pod \"32ff8575-080b-4631-865b-2e9cb4d2c776\" (UID: \"32ff8575-080b-4631-865b-2e9cb4d2c776\") " Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.653150 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-brjph\" (UniqueName: \"kubernetes.io/projected/32ff8575-080b-4631-865b-2e9cb4d2c776-kube-api-access-brjph\") pod \"32ff8575-080b-4631-865b-2e9cb4d2c776\" (UID: \"32ff8575-080b-4631-865b-2e9cb4d2c776\") " Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.653173 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/32ff8575-080b-4631-865b-2e9cb4d2c776-run-httpd\") pod \"32ff8575-080b-4631-865b-2e9cb4d2c776\" (UID: \"32ff8575-080b-4631-865b-2e9cb4d2c776\") " Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.653263 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32ff8575-080b-4631-865b-2e9cb4d2c776-scripts\") pod 
\"32ff8575-080b-4631-865b-2e9cb4d2c776\" (UID: \"32ff8575-080b-4631-865b-2e9cb4d2c776\") " Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.653331 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32ff8575-080b-4631-865b-2e9cb4d2c776-combined-ca-bundle\") pod \"32ff8575-080b-4631-865b-2e9cb4d2c776\" (UID: \"32ff8575-080b-4631-865b-2e9cb4d2c776\") " Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.654999 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32ff8575-080b-4631-865b-2e9cb4d2c776-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "32ff8575-080b-4631-865b-2e9cb4d2c776" (UID: "32ff8575-080b-4631-865b-2e9cb4d2c776"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.655671 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32ff8575-080b-4631-865b-2e9cb4d2c776-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "32ff8575-080b-4631-865b-2e9cb4d2c776" (UID: "32ff8575-080b-4631-865b-2e9cb4d2c776"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.664671 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32ff8575-080b-4631-865b-2e9cb4d2c776-kube-api-access-brjph" (OuterVolumeSpecName: "kube-api-access-brjph") pod "32ff8575-080b-4631-865b-2e9cb4d2c776" (UID: "32ff8575-080b-4631-865b-2e9cb4d2c776"). InnerVolumeSpecName "kube-api-access-brjph". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.665034 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32ff8575-080b-4631-865b-2e9cb4d2c776-scripts" (OuterVolumeSpecName: "scripts") pod "32ff8575-080b-4631-865b-2e9cb4d2c776" (UID: "32ff8575-080b-4631-865b-2e9cb4d2c776"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.689875 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32ff8575-080b-4631-865b-2e9cb4d2c776-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "32ff8575-080b-4631-865b-2e9cb4d2c776" (UID: "32ff8575-080b-4631-865b-2e9cb4d2c776"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.727614 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32ff8575-080b-4631-865b-2e9cb4d2c776-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "32ff8575-080b-4631-865b-2e9cb4d2c776" (UID: "32ff8575-080b-4631-865b-2e9cb4d2c776"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.757774 4576 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32ff8575-080b-4631-865b-2e9cb4d2c776-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.757816 4576 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/32ff8575-080b-4631-865b-2e9cb4d2c776-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.757827 4576 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/32ff8575-080b-4631-865b-2e9cb4d2c776-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.757837 4576 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/32ff8575-080b-4631-865b-2e9cb4d2c776-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.757846 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-brjph\" (UniqueName: \"kubernetes.io/projected/32ff8575-080b-4631-865b-2e9cb4d2c776-kube-api-access-brjph\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.757853 4576 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/32ff8575-080b-4631-865b-2e9cb4d2c776-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.775358 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32ff8575-080b-4631-865b-2e9cb4d2c776-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "32ff8575-080b-4631-865b-2e9cb4d2c776" (UID: "32ff8575-080b-4631-865b-2e9cb4d2c776"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.794319 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32ff8575-080b-4631-865b-2e9cb4d2c776-config-data" (OuterVolumeSpecName: "config-data") pod "32ff8575-080b-4631-865b-2e9cb4d2c776" (UID: "32ff8575-080b-4631-865b-2e9cb4d2c776"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.835652 4576 generic.go:334] "Generic (PLEG): container finished" podID="32ff8575-080b-4631-865b-2e9cb4d2c776" containerID="c0217be089f06393f67eaa770e7ccc9d88507aa7564214c5f0f974c5c7b6f626" exitCode=0 Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.835708 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.835719 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"32ff8575-080b-4631-865b-2e9cb4d2c776","Type":"ContainerDied","Data":"c0217be089f06393f67eaa770e7ccc9d88507aa7564214c5f0f974c5c7b6f626"} Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.835756 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"32ff8575-080b-4631-865b-2e9cb4d2c776","Type":"ContainerDied","Data":"62b27011aedd5345fcfe707638045784fbba4bdb8d80ce228732918373197c34"} Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.835773 4576 scope.go:117] "RemoveContainer" containerID="ba1519cfb9fc007aeeef84dfc11059551e1ebaae6f527698bfb5ed09b9dc863f" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.862772 4576 scope.go:117] "RemoveContainer" containerID="7d6a6a56febe2eee35f223a3b2e341c0fd24c9a2572eec3603a82572dc03632e" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.864783 4576 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32ff8575-080b-4631-865b-2e9cb4d2c776-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.864818 4576 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32ff8575-080b-4631-865b-2e9cb4d2c776-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.887743 4576 scope.go:117] "RemoveContainer" containerID="c0217be089f06393f67eaa770e7ccc9d88507aa7564214c5f0f974c5c7b6f626" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.913881 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.920983 4576 scope.go:117] "RemoveContainer" containerID="8bbf9819979e017cd30b837c3663c822ed1fabfd0f60e564f739e120416d8034" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.930836 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.942167 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:15:46 crc kubenswrapper[4576]: E1203 09:15:46.942801 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32ff8575-080b-4631-865b-2e9cb4d2c776" containerName="ceilometer-notification-agent" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.942875 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="32ff8575-080b-4631-865b-2e9cb4d2c776" containerName="ceilometer-notification-agent" Dec 03 09:15:46 crc kubenswrapper[4576]: E1203 09:15:46.942958 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32ff8575-080b-4631-865b-2e9cb4d2c776" containerName="sg-core" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.943005 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="32ff8575-080b-4631-865b-2e9cb4d2c776" containerName="sg-core" Dec 03 09:15:46 crc kubenswrapper[4576]: E1203 09:15:46.943064 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32ff8575-080b-4631-865b-2e9cb4d2c776" containerName="proxy-httpd" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.943109 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="32ff8575-080b-4631-865b-2e9cb4d2c776" containerName="proxy-httpd" Dec 03 09:15:46 crc 
kubenswrapper[4576]: E1203 09:15:46.943162 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32ff8575-080b-4631-865b-2e9cb4d2c776" containerName="ceilometer-central-agent" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.943210 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="32ff8575-080b-4631-865b-2e9cb4d2c776" containerName="ceilometer-central-agent" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.943671 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="32ff8575-080b-4631-865b-2e9cb4d2c776" containerName="proxy-httpd" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.943783 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="32ff8575-080b-4631-865b-2e9cb4d2c776" containerName="ceilometer-notification-agent" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.943852 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="32ff8575-080b-4631-865b-2e9cb4d2c776" containerName="ceilometer-central-agent" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.943917 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="32ff8575-080b-4631-865b-2e9cb4d2c776" containerName="sg-core" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.944367 4576 scope.go:117] "RemoveContainer" containerID="ba1519cfb9fc007aeeef84dfc11059551e1ebaae6f527698bfb5ed09b9dc863f" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.946077 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 09:15:46 crc kubenswrapper[4576]: E1203 09:15:46.948281 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba1519cfb9fc007aeeef84dfc11059551e1ebaae6f527698bfb5ed09b9dc863f\": container with ID starting with ba1519cfb9fc007aeeef84dfc11059551e1ebaae6f527698bfb5ed09b9dc863f not found: ID does not exist" containerID="ba1519cfb9fc007aeeef84dfc11059551e1ebaae6f527698bfb5ed09b9dc863f" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.948420 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba1519cfb9fc007aeeef84dfc11059551e1ebaae6f527698bfb5ed09b9dc863f"} err="failed to get container status \"ba1519cfb9fc007aeeef84dfc11059551e1ebaae6f527698bfb5ed09b9dc863f\": rpc error: code = NotFound desc = could not find container \"ba1519cfb9fc007aeeef84dfc11059551e1ebaae6f527698bfb5ed09b9dc863f\": container with ID starting with ba1519cfb9fc007aeeef84dfc11059551e1ebaae6f527698bfb5ed09b9dc863f not found: ID does not exist" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.948540 4576 scope.go:117] "RemoveContainer" containerID="7d6a6a56febe2eee35f223a3b2e341c0fd24c9a2572eec3603a82572dc03632e" Dec 03 09:15:46 crc kubenswrapper[4576]: E1203 09:15:46.950048 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d6a6a56febe2eee35f223a3b2e341c0fd24c9a2572eec3603a82572dc03632e\": container with ID starting with 7d6a6a56febe2eee35f223a3b2e341c0fd24c9a2572eec3603a82572dc03632e not found: ID does not exist" containerID="7d6a6a56febe2eee35f223a3b2e341c0fd24c9a2572eec3603a82572dc03632e" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.950082 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d6a6a56febe2eee35f223a3b2e341c0fd24c9a2572eec3603a82572dc03632e"} err="failed to get container status 
\"7d6a6a56febe2eee35f223a3b2e341c0fd24c9a2572eec3603a82572dc03632e\": rpc error: code = NotFound desc = could not find container \"7d6a6a56febe2eee35f223a3b2e341c0fd24c9a2572eec3603a82572dc03632e\": container with ID starting with 7d6a6a56febe2eee35f223a3b2e341c0fd24c9a2572eec3603a82572dc03632e not found: ID does not exist" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.950103 4576 scope.go:117] "RemoveContainer" containerID="c0217be089f06393f67eaa770e7ccc9d88507aa7564214c5f0f974c5c7b6f626" Dec 03 09:15:46 crc kubenswrapper[4576]: E1203 09:15:46.950908 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c0217be089f06393f67eaa770e7ccc9d88507aa7564214c5f0f974c5c7b6f626\": container with ID starting with c0217be089f06393f67eaa770e7ccc9d88507aa7564214c5f0f974c5c7b6f626 not found: ID does not exist" containerID="c0217be089f06393f67eaa770e7ccc9d88507aa7564214c5f0f974c5c7b6f626" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.950935 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c0217be089f06393f67eaa770e7ccc9d88507aa7564214c5f0f974c5c7b6f626"} err="failed to get container status \"c0217be089f06393f67eaa770e7ccc9d88507aa7564214c5f0f974c5c7b6f626\": rpc error: code = NotFound desc = could not find container \"c0217be089f06393f67eaa770e7ccc9d88507aa7564214c5f0f974c5c7b6f626\": container with ID starting with c0217be089f06393f67eaa770e7ccc9d88507aa7564214c5f0f974c5c7b6f626 not found: ID does not exist" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.950954 4576 scope.go:117] "RemoveContainer" containerID="8bbf9819979e017cd30b837c3663c822ed1fabfd0f60e564f739e120416d8034" Dec 03 09:15:46 crc kubenswrapper[4576]: E1203 09:15:46.951176 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8bbf9819979e017cd30b837c3663c822ed1fabfd0f60e564f739e120416d8034\": container with ID starting with 8bbf9819979e017cd30b837c3663c822ed1fabfd0f60e564f739e120416d8034 not found: ID does not exist" containerID="8bbf9819979e017cd30b837c3663c822ed1fabfd0f60e564f739e120416d8034" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.951278 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8bbf9819979e017cd30b837c3663c822ed1fabfd0f60e564f739e120416d8034"} err="failed to get container status \"8bbf9819979e017cd30b837c3663c822ed1fabfd0f60e564f739e120416d8034\": rpc error: code = NotFound desc = could not find container \"8bbf9819979e017cd30b837c3663c822ed1fabfd0f60e564f739e120416d8034\": container with ID starting with 8bbf9819979e017cd30b837c3663c822ed1fabfd0f60e564f739e120416d8034 not found: ID does not exist" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.952581 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.952917 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.953081 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.953312 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.980502 4576 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cba79dc3-6e8e-465e-a3f9-9e2fd67972af-log-httpd\") pod \"ceilometer-0\" (UID: \"cba79dc3-6e8e-465e-a3f9-9e2fd67972af\") " pod="openstack/ceilometer-0" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.980852 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cba79dc3-6e8e-465e-a3f9-9e2fd67972af-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cba79dc3-6e8e-465e-a3f9-9e2fd67972af\") " pod="openstack/ceilometer-0" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.980881 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cba79dc3-6e8e-465e-a3f9-9e2fd67972af-scripts\") pod \"ceilometer-0\" (UID: \"cba79dc3-6e8e-465e-a3f9-9e2fd67972af\") " pod="openstack/ceilometer-0" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.980906 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l2t84\" (UniqueName: \"kubernetes.io/projected/cba79dc3-6e8e-465e-a3f9-9e2fd67972af-kube-api-access-l2t84\") pod \"ceilometer-0\" (UID: \"cba79dc3-6e8e-465e-a3f9-9e2fd67972af\") " pod="openstack/ceilometer-0" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.980927 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cba79dc3-6e8e-465e-a3f9-9e2fd67972af-config-data\") pod \"ceilometer-0\" (UID: \"cba79dc3-6e8e-465e-a3f9-9e2fd67972af\") " pod="openstack/ceilometer-0" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.980983 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cba79dc3-6e8e-465e-a3f9-9e2fd67972af-run-httpd\") pod \"ceilometer-0\" (UID: \"cba79dc3-6e8e-465e-a3f9-9e2fd67972af\") " pod="openstack/ceilometer-0" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.981047 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/cba79dc3-6e8e-465e-a3f9-9e2fd67972af-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"cba79dc3-6e8e-465e-a3f9-9e2fd67972af\") " pod="openstack/ceilometer-0" Dec 03 09:15:46 crc kubenswrapper[4576]: I1203 09:15:46.981081 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cba79dc3-6e8e-465e-a3f9-9e2fd67972af-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cba79dc3-6e8e-465e-a3f9-9e2fd67972af\") " pod="openstack/ceilometer-0" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.082846 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cba79dc3-6e8e-465e-a3f9-9e2fd67972af-config-data\") pod \"ceilometer-0\" (UID: \"cba79dc3-6e8e-465e-a3f9-9e2fd67972af\") " pod="openstack/ceilometer-0" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.083138 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cba79dc3-6e8e-465e-a3f9-9e2fd67972af-run-httpd\") pod \"ceilometer-0\" (UID: 
\"cba79dc3-6e8e-465e-a3f9-9e2fd67972af\") " pod="openstack/ceilometer-0" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.083327 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/cba79dc3-6e8e-465e-a3f9-9e2fd67972af-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"cba79dc3-6e8e-465e-a3f9-9e2fd67972af\") " pod="openstack/ceilometer-0" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.083444 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cba79dc3-6e8e-465e-a3f9-9e2fd67972af-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cba79dc3-6e8e-465e-a3f9-9e2fd67972af\") " pod="openstack/ceilometer-0" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.083653 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cba79dc3-6e8e-465e-a3f9-9e2fd67972af-log-httpd\") pod \"ceilometer-0\" (UID: \"cba79dc3-6e8e-465e-a3f9-9e2fd67972af\") " pod="openstack/ceilometer-0" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.083772 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cba79dc3-6e8e-465e-a3f9-9e2fd67972af-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cba79dc3-6e8e-465e-a3f9-9e2fd67972af\") " pod="openstack/ceilometer-0" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.083864 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cba79dc3-6e8e-465e-a3f9-9e2fd67972af-scripts\") pod \"ceilometer-0\" (UID: \"cba79dc3-6e8e-465e-a3f9-9e2fd67972af\") " pod="openstack/ceilometer-0" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.083975 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l2t84\" (UniqueName: \"kubernetes.io/projected/cba79dc3-6e8e-465e-a3f9-9e2fd67972af-kube-api-access-l2t84\") pod \"ceilometer-0\" (UID: \"cba79dc3-6e8e-465e-a3f9-9e2fd67972af\") " pod="openstack/ceilometer-0" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.085547 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cba79dc3-6e8e-465e-a3f9-9e2fd67972af-run-httpd\") pod \"ceilometer-0\" (UID: \"cba79dc3-6e8e-465e-a3f9-9e2fd67972af\") " pod="openstack/ceilometer-0" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.086814 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cba79dc3-6e8e-465e-a3f9-9e2fd67972af-log-httpd\") pod \"ceilometer-0\" (UID: \"cba79dc3-6e8e-465e-a3f9-9e2fd67972af\") " pod="openstack/ceilometer-0" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.089229 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cba79dc3-6e8e-465e-a3f9-9e2fd67972af-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cba79dc3-6e8e-465e-a3f9-9e2fd67972af\") " pod="openstack/ceilometer-0" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.091051 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/cba79dc3-6e8e-465e-a3f9-9e2fd67972af-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: 
\"cba79dc3-6e8e-465e-a3f9-9e2fd67972af\") " pod="openstack/ceilometer-0" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.092915 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cba79dc3-6e8e-465e-a3f9-9e2fd67972af-scripts\") pod \"ceilometer-0\" (UID: \"cba79dc3-6e8e-465e-a3f9-9e2fd67972af\") " pod="openstack/ceilometer-0" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.093093 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cba79dc3-6e8e-465e-a3f9-9e2fd67972af-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cba79dc3-6e8e-465e-a3f9-9e2fd67972af\") " pod="openstack/ceilometer-0" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.095898 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cba79dc3-6e8e-465e-a3f9-9e2fd67972af-config-data\") pod \"ceilometer-0\" (UID: \"cba79dc3-6e8e-465e-a3f9-9e2fd67972af\") " pod="openstack/ceilometer-0" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.105616 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l2t84\" (UniqueName: \"kubernetes.io/projected/cba79dc3-6e8e-465e-a3f9-9e2fd67972af-kube-api-access-l2t84\") pod \"ceilometer-0\" (UID: \"cba79dc3-6e8e-465e-a3f9-9e2fd67972af\") " pod="openstack/ceilometer-0" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.277640 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.393836 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.492132 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7664b50e-8727-4498-891e-1950d78221f4-combined-ca-bundle\") pod \"7664b50e-8727-4498-891e-1950d78221f4\" (UID: \"7664b50e-8727-4498-891e-1950d78221f4\") " Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.493013 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7664b50e-8727-4498-891e-1950d78221f4-config-data\") pod \"7664b50e-8727-4498-891e-1950d78221f4\" (UID: \"7664b50e-8727-4498-891e-1950d78221f4\") " Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.537631 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7664b50e-8727-4498-891e-1950d78221f4-config-data" (OuterVolumeSpecName: "config-data") pod "7664b50e-8727-4498-891e-1950d78221f4" (UID: "7664b50e-8727-4498-891e-1950d78221f4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.544631 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7664b50e-8727-4498-891e-1950d78221f4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7664b50e-8727-4498-891e-1950d78221f4" (UID: "7664b50e-8727-4498-891e-1950d78221f4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.594391 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7664b50e-8727-4498-891e-1950d78221f4-logs\") pod \"7664b50e-8727-4498-891e-1950d78221f4\" (UID: \"7664b50e-8727-4498-891e-1950d78221f4\") " Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.594647 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vg9w9\" (UniqueName: \"kubernetes.io/projected/7664b50e-8727-4498-891e-1950d78221f4-kube-api-access-vg9w9\") pod \"7664b50e-8727-4498-891e-1950d78221f4\" (UID: \"7664b50e-8727-4498-891e-1950d78221f4\") " Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.595003 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7664b50e-8727-4498-891e-1950d78221f4-logs" (OuterVolumeSpecName: "logs") pod "7664b50e-8727-4498-891e-1950d78221f4" (UID: "7664b50e-8727-4498-891e-1950d78221f4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.595306 4576 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7664b50e-8727-4498-891e-1950d78221f4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.595324 4576 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7664b50e-8727-4498-891e-1950d78221f4-logs\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.595335 4576 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7664b50e-8727-4498-891e-1950d78221f4-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.600082 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7664b50e-8727-4498-891e-1950d78221f4-kube-api-access-vg9w9" (OuterVolumeSpecName: "kube-api-access-vg9w9") pod "7664b50e-8727-4498-891e-1950d78221f4" (UID: "7664b50e-8727-4498-891e-1950d78221f4"). InnerVolumeSpecName "kube-api-access-vg9w9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.697405 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32ff8575-080b-4631-865b-2e9cb4d2c776" path="/var/lib/kubelet/pods/32ff8575-080b-4631-865b-2e9cb4d2c776/volumes" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.699288 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vg9w9\" (UniqueName: \"kubernetes.io/projected/7664b50e-8727-4498-891e-1950d78221f4-kube-api-access-vg9w9\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.786982 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.851623 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cba79dc3-6e8e-465e-a3f9-9e2fd67972af","Type":"ContainerStarted","Data":"913ab899c7664769646000bd21c29e9fa9562a400b1a744c51e6de654ffd60a9"} Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.853792 4576 generic.go:334] "Generic (PLEG): container finished" podID="7664b50e-8727-4498-891e-1950d78221f4" containerID="d614b432afa11510b2c5cdffe4335ce969d869c5b263970eca36797c1737ce3a" exitCode=0 Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.853916 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7664b50e-8727-4498-891e-1950d78221f4","Type":"ContainerDied","Data":"d614b432afa11510b2c5cdffe4335ce969d869c5b263970eca36797c1737ce3a"} Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.853947 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7664b50e-8727-4498-891e-1950d78221f4","Type":"ContainerDied","Data":"1cf30f7044356cd32c077aaded1fa8ed32d09d7673003d0b358c0deb80455b5e"} Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.853983 4576 scope.go:117] "RemoveContainer" containerID="d614b432afa11510b2c5cdffe4335ce969d869c5b263970eca36797c1737ce3a" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.854066 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.876753 4576 scope.go:117] "RemoveContainer" containerID="aa110946c1d0e2ade0e4f419daa1af85fc426c5494eb20db53b0991b5835a0f6" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.879644 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.901803 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.912237 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 03 09:15:47 crc kubenswrapper[4576]: E1203 09:15:47.912710 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7664b50e-8727-4498-891e-1950d78221f4" containerName="nova-api-log" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.912729 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="7664b50e-8727-4498-891e-1950d78221f4" containerName="nova-api-log" Dec 03 09:15:47 crc kubenswrapper[4576]: E1203 09:15:47.912753 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7664b50e-8727-4498-891e-1950d78221f4" containerName="nova-api-api" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.912760 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="7664b50e-8727-4498-891e-1950d78221f4" containerName="nova-api-api" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.912969 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="7664b50e-8727-4498-891e-1950d78221f4" containerName="nova-api-api" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.913006 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="7664b50e-8727-4498-891e-1950d78221f4" containerName="nova-api-log" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.914600 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.918689 4576 scope.go:117] "RemoveContainer" containerID="d614b432afa11510b2c5cdffe4335ce969d869c5b263970eca36797c1737ce3a" Dec 03 09:15:47 crc kubenswrapper[4576]: E1203 09:15:47.920684 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d614b432afa11510b2c5cdffe4335ce969d869c5b263970eca36797c1737ce3a\": container with ID starting with d614b432afa11510b2c5cdffe4335ce969d869c5b263970eca36797c1737ce3a not found: ID does not exist" containerID="d614b432afa11510b2c5cdffe4335ce969d869c5b263970eca36797c1737ce3a" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.920728 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d614b432afa11510b2c5cdffe4335ce969d869c5b263970eca36797c1737ce3a"} err="failed to get container status \"d614b432afa11510b2c5cdffe4335ce969d869c5b263970eca36797c1737ce3a\": rpc error: code = NotFound desc = could not find container \"d614b432afa11510b2c5cdffe4335ce969d869c5b263970eca36797c1737ce3a\": container with ID starting with d614b432afa11510b2c5cdffe4335ce969d869c5b263970eca36797c1737ce3a not found: ID does not exist" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.920838 4576 scope.go:117] "RemoveContainer" containerID="aa110946c1d0e2ade0e4f419daa1af85fc426c5494eb20db53b0991b5835a0f6" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.923967 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.924166 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.924294 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 03 09:15:47 crc kubenswrapper[4576]: E1203 09:15:47.928305 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa110946c1d0e2ade0e4f419daa1af85fc426c5494eb20db53b0991b5835a0f6\": container with ID starting with aa110946c1d0e2ade0e4f419daa1af85fc426c5494eb20db53b0991b5835a0f6 not found: ID does not exist" containerID="aa110946c1d0e2ade0e4f419daa1af85fc426c5494eb20db53b0991b5835a0f6" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.928344 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa110946c1d0e2ade0e4f419daa1af85fc426c5494eb20db53b0991b5835a0f6"} err="failed to get container status \"aa110946c1d0e2ade0e4f419daa1af85fc426c5494eb20db53b0991b5835a0f6\": rpc error: code = NotFound desc = could not find container \"aa110946c1d0e2ade0e4f419daa1af85fc426c5494eb20db53b0991b5835a0f6\": container with ID starting with aa110946c1d0e2ade0e4f419daa1af85fc426c5494eb20db53b0991b5835a0f6 not found: ID does not exist" Dec 03 09:15:47 crc kubenswrapper[4576]: I1203 09:15:47.941003 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 09:15:48 crc kubenswrapper[4576]: I1203 09:15:48.004609 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54731e6f-6a04-4526-8031-76ee51a20ea5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"54731e6f-6a04-4526-8031-76ee51a20ea5\") " pod="openstack/nova-api-0" Dec 03 
09:15:48 crc kubenswrapper[4576]: I1203 09:15:48.004663 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxlqw\" (UniqueName: \"kubernetes.io/projected/54731e6f-6a04-4526-8031-76ee51a20ea5-kube-api-access-kxlqw\") pod \"nova-api-0\" (UID: \"54731e6f-6a04-4526-8031-76ee51a20ea5\") " pod="openstack/nova-api-0" Dec 03 09:15:48 crc kubenswrapper[4576]: I1203 09:15:48.004752 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54731e6f-6a04-4526-8031-76ee51a20ea5-config-data\") pod \"nova-api-0\" (UID: \"54731e6f-6a04-4526-8031-76ee51a20ea5\") " pod="openstack/nova-api-0" Dec 03 09:15:48 crc kubenswrapper[4576]: I1203 09:15:48.004781 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54731e6f-6a04-4526-8031-76ee51a20ea5-logs\") pod \"nova-api-0\" (UID: \"54731e6f-6a04-4526-8031-76ee51a20ea5\") " pod="openstack/nova-api-0" Dec 03 09:15:48 crc kubenswrapper[4576]: I1203 09:15:48.004802 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/54731e6f-6a04-4526-8031-76ee51a20ea5-public-tls-certs\") pod \"nova-api-0\" (UID: \"54731e6f-6a04-4526-8031-76ee51a20ea5\") " pod="openstack/nova-api-0" Dec 03 09:15:48 crc kubenswrapper[4576]: I1203 09:15:48.004835 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/54731e6f-6a04-4526-8031-76ee51a20ea5-internal-tls-certs\") pod \"nova-api-0\" (UID: \"54731e6f-6a04-4526-8031-76ee51a20ea5\") " pod="openstack/nova-api-0" Dec 03 09:15:48 crc kubenswrapper[4576]: I1203 09:15:48.109599 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54731e6f-6a04-4526-8031-76ee51a20ea5-logs\") pod \"nova-api-0\" (UID: \"54731e6f-6a04-4526-8031-76ee51a20ea5\") " pod="openstack/nova-api-0" Dec 03 09:15:48 crc kubenswrapper[4576]: I1203 09:15:48.109667 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/54731e6f-6a04-4526-8031-76ee51a20ea5-public-tls-certs\") pod \"nova-api-0\" (UID: \"54731e6f-6a04-4526-8031-76ee51a20ea5\") " pod="openstack/nova-api-0" Dec 03 09:15:48 crc kubenswrapper[4576]: I1203 09:15:48.109719 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/54731e6f-6a04-4526-8031-76ee51a20ea5-internal-tls-certs\") pod \"nova-api-0\" (UID: \"54731e6f-6a04-4526-8031-76ee51a20ea5\") " pod="openstack/nova-api-0" Dec 03 09:15:48 crc kubenswrapper[4576]: I1203 09:15:48.109770 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54731e6f-6a04-4526-8031-76ee51a20ea5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"54731e6f-6a04-4526-8031-76ee51a20ea5\") " pod="openstack/nova-api-0" Dec 03 09:15:48 crc kubenswrapper[4576]: I1203 09:15:48.109794 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxlqw\" (UniqueName: \"kubernetes.io/projected/54731e6f-6a04-4526-8031-76ee51a20ea5-kube-api-access-kxlqw\") pod \"nova-api-0\" (UID: 
\"54731e6f-6a04-4526-8031-76ee51a20ea5\") " pod="openstack/nova-api-0" Dec 03 09:15:48 crc kubenswrapper[4576]: I1203 09:15:48.109881 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54731e6f-6a04-4526-8031-76ee51a20ea5-config-data\") pod \"nova-api-0\" (UID: \"54731e6f-6a04-4526-8031-76ee51a20ea5\") " pod="openstack/nova-api-0" Dec 03 09:15:48 crc kubenswrapper[4576]: I1203 09:15:48.118554 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54731e6f-6a04-4526-8031-76ee51a20ea5-config-data\") pod \"nova-api-0\" (UID: \"54731e6f-6a04-4526-8031-76ee51a20ea5\") " pod="openstack/nova-api-0" Dec 03 09:15:48 crc kubenswrapper[4576]: I1203 09:15:48.119103 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54731e6f-6a04-4526-8031-76ee51a20ea5-logs\") pod \"nova-api-0\" (UID: \"54731e6f-6a04-4526-8031-76ee51a20ea5\") " pod="openstack/nova-api-0" Dec 03 09:15:48 crc kubenswrapper[4576]: I1203 09:15:48.120034 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Dec 03 09:15:48 crc kubenswrapper[4576]: I1203 09:15:48.140687 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/54731e6f-6a04-4526-8031-76ee51a20ea5-internal-tls-certs\") pod \"nova-api-0\" (UID: \"54731e6f-6a04-4526-8031-76ee51a20ea5\") " pod="openstack/nova-api-0" Dec 03 09:15:48 crc kubenswrapper[4576]: I1203 09:15:48.147173 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54731e6f-6a04-4526-8031-76ee51a20ea5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"54731e6f-6a04-4526-8031-76ee51a20ea5\") " pod="openstack/nova-api-0" Dec 03 09:15:48 crc kubenswrapper[4576]: I1203 09:15:48.157188 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/54731e6f-6a04-4526-8031-76ee51a20ea5-public-tls-certs\") pod \"nova-api-0\" (UID: \"54731e6f-6a04-4526-8031-76ee51a20ea5\") " pod="openstack/nova-api-0" Dec 03 09:15:48 crc kubenswrapper[4576]: I1203 09:15:48.163926 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxlqw\" (UniqueName: \"kubernetes.io/projected/54731e6f-6a04-4526-8031-76ee51a20ea5-kube-api-access-kxlqw\") pod \"nova-api-0\" (UID: \"54731e6f-6a04-4526-8031-76ee51a20ea5\") " pod="openstack/nova-api-0" Dec 03 09:15:48 crc kubenswrapper[4576]: I1203 09:15:48.183965 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Dec 03 09:15:48 crc kubenswrapper[4576]: I1203 09:15:48.246587 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 09:15:48 crc kubenswrapper[4576]: I1203 09:15:48.848297 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 09:15:48 crc kubenswrapper[4576]: W1203 09:15:48.866183 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod54731e6f_6a04_4526_8031_76ee51a20ea5.slice/crio-4869b3bf3e86e1f70bea21df5a40f87ecdf298fa41bbf9e3a865feb49e6b478a WatchSource:0}: Error finding container 4869b3bf3e86e1f70bea21df5a40f87ecdf298fa41bbf9e3a865feb49e6b478a: Status 404 returned error can't find the container with id 4869b3bf3e86e1f70bea21df5a40f87ecdf298fa41bbf9e3a865feb49e6b478a Dec 03 09:15:48 crc kubenswrapper[4576]: I1203 09:15:48.872209 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cba79dc3-6e8e-465e-a3f9-9e2fd67972af","Type":"ContainerStarted","Data":"8e45cec8d106eb887a9ce3f18f7c3d0e6a485ddb147075010107124de070b8e4"} Dec 03 09:15:48 crc kubenswrapper[4576]: I1203 09:15:48.895984 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Dec 03 09:15:49 crc kubenswrapper[4576]: I1203 09:15:49.074277 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-wgjr9"] Dec 03 09:15:49 crc kubenswrapper[4576]: I1203 09:15:49.075842 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-wgjr9" Dec 03 09:15:49 crc kubenswrapper[4576]: I1203 09:15:49.078674 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Dec 03 09:15:49 crc kubenswrapper[4576]: I1203 09:15:49.078838 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Dec 03 09:15:49 crc kubenswrapper[4576]: I1203 09:15:49.083511 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-wgjr9"] Dec 03 09:15:49 crc kubenswrapper[4576]: I1203 09:15:49.237788 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2cdcf30e-382e-4530-95ea-1590dbe7d7d1-scripts\") pod \"nova-cell1-cell-mapping-wgjr9\" (UID: \"2cdcf30e-382e-4530-95ea-1590dbe7d7d1\") " pod="openstack/nova-cell1-cell-mapping-wgjr9" Dec 03 09:15:49 crc kubenswrapper[4576]: I1203 09:15:49.237873 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5nc4t\" (UniqueName: \"kubernetes.io/projected/2cdcf30e-382e-4530-95ea-1590dbe7d7d1-kube-api-access-5nc4t\") pod \"nova-cell1-cell-mapping-wgjr9\" (UID: \"2cdcf30e-382e-4530-95ea-1590dbe7d7d1\") " pod="openstack/nova-cell1-cell-mapping-wgjr9" Dec 03 09:15:49 crc kubenswrapper[4576]: I1203 09:15:49.237997 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2cdcf30e-382e-4530-95ea-1590dbe7d7d1-config-data\") pod \"nova-cell1-cell-mapping-wgjr9\" (UID: \"2cdcf30e-382e-4530-95ea-1590dbe7d7d1\") " pod="openstack/nova-cell1-cell-mapping-wgjr9" Dec 03 09:15:49 crc kubenswrapper[4576]: I1203 09:15:49.238123 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/2cdcf30e-382e-4530-95ea-1590dbe7d7d1-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-wgjr9\" (UID: \"2cdcf30e-382e-4530-95ea-1590dbe7d7d1\") " pod="openstack/nova-cell1-cell-mapping-wgjr9" Dec 03 09:15:49 crc kubenswrapper[4576]: I1203 09:15:49.340165 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cdcf30e-382e-4530-95ea-1590dbe7d7d1-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-wgjr9\" (UID: \"2cdcf30e-382e-4530-95ea-1590dbe7d7d1\") " pod="openstack/nova-cell1-cell-mapping-wgjr9" Dec 03 09:15:49 crc kubenswrapper[4576]: I1203 09:15:49.340276 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2cdcf30e-382e-4530-95ea-1590dbe7d7d1-scripts\") pod \"nova-cell1-cell-mapping-wgjr9\" (UID: \"2cdcf30e-382e-4530-95ea-1590dbe7d7d1\") " pod="openstack/nova-cell1-cell-mapping-wgjr9" Dec 03 09:15:49 crc kubenswrapper[4576]: I1203 09:15:49.340311 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5nc4t\" (UniqueName: \"kubernetes.io/projected/2cdcf30e-382e-4530-95ea-1590dbe7d7d1-kube-api-access-5nc4t\") pod \"nova-cell1-cell-mapping-wgjr9\" (UID: \"2cdcf30e-382e-4530-95ea-1590dbe7d7d1\") " pod="openstack/nova-cell1-cell-mapping-wgjr9" Dec 03 09:15:49 crc kubenswrapper[4576]: I1203 09:15:49.340366 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2cdcf30e-382e-4530-95ea-1590dbe7d7d1-config-data\") pod \"nova-cell1-cell-mapping-wgjr9\" (UID: \"2cdcf30e-382e-4530-95ea-1590dbe7d7d1\") " pod="openstack/nova-cell1-cell-mapping-wgjr9" Dec 03 09:15:49 crc kubenswrapper[4576]: I1203 09:15:49.345509 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2cdcf30e-382e-4530-95ea-1590dbe7d7d1-config-data\") pod \"nova-cell1-cell-mapping-wgjr9\" (UID: \"2cdcf30e-382e-4530-95ea-1590dbe7d7d1\") " pod="openstack/nova-cell1-cell-mapping-wgjr9" Dec 03 09:15:49 crc kubenswrapper[4576]: I1203 09:15:49.352935 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2cdcf30e-382e-4530-95ea-1590dbe7d7d1-scripts\") pod \"nova-cell1-cell-mapping-wgjr9\" (UID: \"2cdcf30e-382e-4530-95ea-1590dbe7d7d1\") " pod="openstack/nova-cell1-cell-mapping-wgjr9" Dec 03 09:15:49 crc kubenswrapper[4576]: I1203 09:15:49.359023 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cdcf30e-382e-4530-95ea-1590dbe7d7d1-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-wgjr9\" (UID: \"2cdcf30e-382e-4530-95ea-1590dbe7d7d1\") " pod="openstack/nova-cell1-cell-mapping-wgjr9" Dec 03 09:15:49 crc kubenswrapper[4576]: I1203 09:15:49.378172 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5nc4t\" (UniqueName: \"kubernetes.io/projected/2cdcf30e-382e-4530-95ea-1590dbe7d7d1-kube-api-access-5nc4t\") pod \"nova-cell1-cell-mapping-wgjr9\" (UID: \"2cdcf30e-382e-4530-95ea-1590dbe7d7d1\") " pod="openstack/nova-cell1-cell-mapping-wgjr9" Dec 03 09:15:49 crc kubenswrapper[4576]: I1203 09:15:49.405320 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-wgjr9" Dec 03 09:15:49 crc kubenswrapper[4576]: I1203 09:15:49.690443 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7664b50e-8727-4498-891e-1950d78221f4" path="/var/lib/kubelet/pods/7664b50e-8727-4498-891e-1950d78221f4/volumes" Dec 03 09:15:49 crc kubenswrapper[4576]: I1203 09:15:49.915916 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"54731e6f-6a04-4526-8031-76ee51a20ea5","Type":"ContainerStarted","Data":"79b1a94308d6acf4571b8663004a23d399f8e722c4a22634e04160763d4d93af"} Dec 03 09:15:49 crc kubenswrapper[4576]: I1203 09:15:49.915963 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"54731e6f-6a04-4526-8031-76ee51a20ea5","Type":"ContainerStarted","Data":"d7a6bb829d17d5bf5886bd5621f72f0969afb6812e58981e180186a32b895c6f"} Dec 03 09:15:49 crc kubenswrapper[4576]: I1203 09:15:49.915976 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"54731e6f-6a04-4526-8031-76ee51a20ea5","Type":"ContainerStarted","Data":"4869b3bf3e86e1f70bea21df5a40f87ecdf298fa41bbf9e3a865feb49e6b478a"} Dec 03 09:15:49 crc kubenswrapper[4576]: I1203 09:15:49.924291 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cba79dc3-6e8e-465e-a3f9-9e2fd67972af","Type":"ContainerStarted","Data":"e42c5fa53b2a285746baf29e3f2280951eb4572bbaaf311aa37be68a1d3e9a70"} Dec 03 09:15:49 crc kubenswrapper[4576]: I1203 09:15:49.952860 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-wgjr9"] Dec 03 09:15:49 crc kubenswrapper[4576]: I1203 09:15:49.958985 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.9589645300000003 podStartE2EDuration="2.95896453s" podCreationTimestamp="2025-12-03 09:15:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:15:49.936488347 +0000 UTC m=+2157.322465341" watchObservedRunningTime="2025-12-03 09:15:49.95896453 +0000 UTC m=+2157.344941514" Dec 03 09:15:50 crc kubenswrapper[4576]: I1203 09:15:50.936641 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cba79dc3-6e8e-465e-a3f9-9e2fd67972af","Type":"ContainerStarted","Data":"bffa817175c60d4965b4380231e50f778aa783e054781473cff8b6dc92d88178"} Dec 03 09:15:50 crc kubenswrapper[4576]: I1203 09:15:50.938373 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-wgjr9" event={"ID":"2cdcf30e-382e-4530-95ea-1590dbe7d7d1","Type":"ContainerStarted","Data":"ce271073fdd305620d609d731cdfc7425998214290f4e3d5d73fa66bccd0efd7"} Dec 03 09:15:50 crc kubenswrapper[4576]: I1203 09:15:50.938434 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-wgjr9" event={"ID":"2cdcf30e-382e-4530-95ea-1590dbe7d7d1","Type":"ContainerStarted","Data":"464ff9d86742a960638fa858120ff55241471667961484ffc5c76016772b03ac"} Dec 03 09:15:51 crc kubenswrapper[4576]: I1203 09:15:51.292093 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-cd5cbd7b9-zxdsl" Dec 03 09:15:51 crc kubenswrapper[4576]: I1203 09:15:51.346325 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-wgjr9" 
podStartSLOduration=2.346301654 podStartE2EDuration="2.346301654s" podCreationTimestamp="2025-12-03 09:15:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:15:50.95888717 +0000 UTC m=+2158.344864154" watchObservedRunningTime="2025-12-03 09:15:51.346301654 +0000 UTC m=+2158.732278638" Dec 03 09:15:51 crc kubenswrapper[4576]: I1203 09:15:51.466900 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-rzzhl"] Dec 03 09:15:51 crc kubenswrapper[4576]: I1203 09:15:51.467171 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-bccf8f775-rzzhl" podUID="7b4f90af-69a8-42f0-a715-ea40baaec5c3" containerName="dnsmasq-dns" containerID="cri-o://bf428d02dad0cbf5a0d575b860850cf79216c7c771ea996a1bdc4149fdd4eb2d" gracePeriod=10 Dec 03 09:15:51 crc kubenswrapper[4576]: I1203 09:15:51.955833 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cba79dc3-6e8e-465e-a3f9-9e2fd67972af","Type":"ContainerStarted","Data":"2c7f1f26e4093089f466ca8e56acaef627bd8679c7a956605dda6b8c33e04dc2"} Dec 03 09:15:51 crc kubenswrapper[4576]: I1203 09:15:51.956600 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 09:15:51 crc kubenswrapper[4576]: I1203 09:15:51.960555 4576 generic.go:334] "Generic (PLEG): container finished" podID="7b4f90af-69a8-42f0-a715-ea40baaec5c3" containerID="bf428d02dad0cbf5a0d575b860850cf79216c7c771ea996a1bdc4149fdd4eb2d" exitCode=0 Dec 03 09:15:51 crc kubenswrapper[4576]: I1203 09:15:51.960740 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bccf8f775-rzzhl" event={"ID":"7b4f90af-69a8-42f0-a715-ea40baaec5c3","Type":"ContainerDied","Data":"bf428d02dad0cbf5a0d575b860850cf79216c7c771ea996a1bdc4149fdd4eb2d"} Dec 03 09:15:52 crc kubenswrapper[4576]: I1203 09:15:51.995032 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.527803161 podStartE2EDuration="5.995010848s" podCreationTimestamp="2025-12-03 09:15:46 +0000 UTC" firstStartedPulling="2025-12-03 09:15:47.796997236 +0000 UTC m=+2155.182974220" lastFinishedPulling="2025-12-03 09:15:51.264204913 +0000 UTC m=+2158.650181907" observedRunningTime="2025-12-03 09:15:51.979022291 +0000 UTC m=+2159.364999275" watchObservedRunningTime="2025-12-03 09:15:51.995010848 +0000 UTC m=+2159.380987822" Dec 03 09:15:52 crc kubenswrapper[4576]: I1203 09:15:52.086321 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-bccf8f775-rzzhl" Dec 03 09:15:52 crc kubenswrapper[4576]: I1203 09:15:52.198355 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7b4f90af-69a8-42f0-a715-ea40baaec5c3-ovsdbserver-nb\") pod \"7b4f90af-69a8-42f0-a715-ea40baaec5c3\" (UID: \"7b4f90af-69a8-42f0-a715-ea40baaec5c3\") " Dec 03 09:15:52 crc kubenswrapper[4576]: I1203 09:15:52.198486 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fwbx8\" (UniqueName: \"kubernetes.io/projected/7b4f90af-69a8-42f0-a715-ea40baaec5c3-kube-api-access-fwbx8\") pod \"7b4f90af-69a8-42f0-a715-ea40baaec5c3\" (UID: \"7b4f90af-69a8-42f0-a715-ea40baaec5c3\") " Dec 03 09:15:52 crc kubenswrapper[4576]: I1203 09:15:52.198639 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7b4f90af-69a8-42f0-a715-ea40baaec5c3-ovsdbserver-sb\") pod \"7b4f90af-69a8-42f0-a715-ea40baaec5c3\" (UID: \"7b4f90af-69a8-42f0-a715-ea40baaec5c3\") " Dec 03 09:15:52 crc kubenswrapper[4576]: I1203 09:15:52.198673 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7b4f90af-69a8-42f0-a715-ea40baaec5c3-dns-swift-storage-0\") pod \"7b4f90af-69a8-42f0-a715-ea40baaec5c3\" (UID: \"7b4f90af-69a8-42f0-a715-ea40baaec5c3\") " Dec 03 09:15:52 crc kubenswrapper[4576]: I1203 09:15:52.198760 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b4f90af-69a8-42f0-a715-ea40baaec5c3-config\") pod \"7b4f90af-69a8-42f0-a715-ea40baaec5c3\" (UID: \"7b4f90af-69a8-42f0-a715-ea40baaec5c3\") " Dec 03 09:15:52 crc kubenswrapper[4576]: I1203 09:15:52.198841 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7b4f90af-69a8-42f0-a715-ea40baaec5c3-dns-svc\") pod \"7b4f90af-69a8-42f0-a715-ea40baaec5c3\" (UID: \"7b4f90af-69a8-42f0-a715-ea40baaec5c3\") " Dec 03 09:15:52 crc kubenswrapper[4576]: I1203 09:15:52.210452 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b4f90af-69a8-42f0-a715-ea40baaec5c3-kube-api-access-fwbx8" (OuterVolumeSpecName: "kube-api-access-fwbx8") pod "7b4f90af-69a8-42f0-a715-ea40baaec5c3" (UID: "7b4f90af-69a8-42f0-a715-ea40baaec5c3"). InnerVolumeSpecName "kube-api-access-fwbx8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:15:52 crc kubenswrapper[4576]: I1203 09:15:52.250158 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b4f90af-69a8-42f0-a715-ea40baaec5c3-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "7b4f90af-69a8-42f0-a715-ea40baaec5c3" (UID: "7b4f90af-69a8-42f0-a715-ea40baaec5c3"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:15:52 crc kubenswrapper[4576]: I1203 09:15:52.269920 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b4f90af-69a8-42f0-a715-ea40baaec5c3-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7b4f90af-69a8-42f0-a715-ea40baaec5c3" (UID: "7b4f90af-69a8-42f0-a715-ea40baaec5c3"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:15:52 crc kubenswrapper[4576]: I1203 09:15:52.290200 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b4f90af-69a8-42f0-a715-ea40baaec5c3-config" (OuterVolumeSpecName: "config") pod "7b4f90af-69a8-42f0-a715-ea40baaec5c3" (UID: "7b4f90af-69a8-42f0-a715-ea40baaec5c3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:15:52 crc kubenswrapper[4576]: I1203 09:15:52.300975 4576 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b4f90af-69a8-42f0-a715-ea40baaec5c3-config\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:52 crc kubenswrapper[4576]: I1203 09:15:52.301002 4576 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7b4f90af-69a8-42f0-a715-ea40baaec5c3-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:52 crc kubenswrapper[4576]: I1203 09:15:52.301012 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fwbx8\" (UniqueName: \"kubernetes.io/projected/7b4f90af-69a8-42f0-a715-ea40baaec5c3-kube-api-access-fwbx8\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:52 crc kubenswrapper[4576]: I1203 09:15:52.301021 4576 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7b4f90af-69a8-42f0-a715-ea40baaec5c3-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:52 crc kubenswrapper[4576]: I1203 09:15:52.303027 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b4f90af-69a8-42f0-a715-ea40baaec5c3-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7b4f90af-69a8-42f0-a715-ea40baaec5c3" (UID: "7b4f90af-69a8-42f0-a715-ea40baaec5c3"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:15:52 crc kubenswrapper[4576]: I1203 09:15:52.304321 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b4f90af-69a8-42f0-a715-ea40baaec5c3-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "7b4f90af-69a8-42f0-a715-ea40baaec5c3" (UID: "7b4f90af-69a8-42f0-a715-ea40baaec5c3"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:15:52 crc kubenswrapper[4576]: I1203 09:15:52.402874 4576 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7b4f90af-69a8-42f0-a715-ea40baaec5c3-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:52 crc kubenswrapper[4576]: I1203 09:15:52.403413 4576 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7b4f90af-69a8-42f0-a715-ea40baaec5c3-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:52 crc kubenswrapper[4576]: I1203 09:15:52.995062 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bccf8f775-rzzhl" event={"ID":"7b4f90af-69a8-42f0-a715-ea40baaec5c3","Type":"ContainerDied","Data":"ac81b8a934ab1ccdbff8c8f20223f54976989ed1c20c65d573ea06afec696461"} Dec 03 09:15:52 crc kubenswrapper[4576]: I1203 09:15:52.995139 4576 scope.go:117] "RemoveContainer" containerID="bf428d02dad0cbf5a0d575b860850cf79216c7c771ea996a1bdc4149fdd4eb2d" Dec 03 09:15:52 crc kubenswrapper[4576]: I1203 09:15:52.997378 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bccf8f775-rzzhl" Dec 03 09:15:53 crc kubenswrapper[4576]: I1203 09:15:53.032780 4576 scope.go:117] "RemoveContainer" containerID="7a614826ad27b888bf2098152d98bd3bdfc1f1d2f1f80a0cfb81fcf4b1d7cfdd" Dec 03 09:15:53 crc kubenswrapper[4576]: I1203 09:15:53.083629 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-rzzhl"] Dec 03 09:15:53 crc kubenswrapper[4576]: I1203 09:15:53.090465 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-rzzhl"] Dec 03 09:15:53 crc kubenswrapper[4576]: I1203 09:15:53.690141 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b4f90af-69a8-42f0-a715-ea40baaec5c3" path="/var/lib/kubelet/pods/7b4f90af-69a8-42f0-a715-ea40baaec5c3/volumes" Dec 03 09:15:56 crc kubenswrapper[4576]: I1203 09:15:56.026432 4576 generic.go:334] "Generic (PLEG): container finished" podID="2cdcf30e-382e-4530-95ea-1590dbe7d7d1" containerID="ce271073fdd305620d609d731cdfc7425998214290f4e3d5d73fa66bccd0efd7" exitCode=0 Dec 03 09:15:56 crc kubenswrapper[4576]: I1203 09:15:56.026500 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-wgjr9" event={"ID":"2cdcf30e-382e-4530-95ea-1590dbe7d7d1","Type":"ContainerDied","Data":"ce271073fdd305620d609d731cdfc7425998214290f4e3d5d73fa66bccd0efd7"} Dec 03 09:15:57 crc kubenswrapper[4576]: I1203 09:15:57.457797 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-wgjr9" Dec 03 09:15:57 crc kubenswrapper[4576]: I1203 09:15:57.635631 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2cdcf30e-382e-4530-95ea-1590dbe7d7d1-scripts\") pod \"2cdcf30e-382e-4530-95ea-1590dbe7d7d1\" (UID: \"2cdcf30e-382e-4530-95ea-1590dbe7d7d1\") " Dec 03 09:15:57 crc kubenswrapper[4576]: I1203 09:15:57.635880 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2cdcf30e-382e-4530-95ea-1590dbe7d7d1-config-data\") pod \"2cdcf30e-382e-4530-95ea-1590dbe7d7d1\" (UID: \"2cdcf30e-382e-4530-95ea-1590dbe7d7d1\") " Dec 03 09:15:57 crc kubenswrapper[4576]: I1203 09:15:57.636792 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cdcf30e-382e-4530-95ea-1590dbe7d7d1-combined-ca-bundle\") pod \"2cdcf30e-382e-4530-95ea-1590dbe7d7d1\" (UID: \"2cdcf30e-382e-4530-95ea-1590dbe7d7d1\") " Dec 03 09:15:57 crc kubenswrapper[4576]: I1203 09:15:57.636843 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5nc4t\" (UniqueName: \"kubernetes.io/projected/2cdcf30e-382e-4530-95ea-1590dbe7d7d1-kube-api-access-5nc4t\") pod \"2cdcf30e-382e-4530-95ea-1590dbe7d7d1\" (UID: \"2cdcf30e-382e-4530-95ea-1590dbe7d7d1\") " Dec 03 09:15:57 crc kubenswrapper[4576]: I1203 09:15:57.642800 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2cdcf30e-382e-4530-95ea-1590dbe7d7d1-scripts" (OuterVolumeSpecName: "scripts") pod "2cdcf30e-382e-4530-95ea-1590dbe7d7d1" (UID: "2cdcf30e-382e-4530-95ea-1590dbe7d7d1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:15:57 crc kubenswrapper[4576]: I1203 09:15:57.648149 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2cdcf30e-382e-4530-95ea-1590dbe7d7d1-kube-api-access-5nc4t" (OuterVolumeSpecName: "kube-api-access-5nc4t") pod "2cdcf30e-382e-4530-95ea-1590dbe7d7d1" (UID: "2cdcf30e-382e-4530-95ea-1590dbe7d7d1"). InnerVolumeSpecName "kube-api-access-5nc4t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:15:57 crc kubenswrapper[4576]: I1203 09:15:57.676566 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2cdcf30e-382e-4530-95ea-1590dbe7d7d1-config-data" (OuterVolumeSpecName: "config-data") pod "2cdcf30e-382e-4530-95ea-1590dbe7d7d1" (UID: "2cdcf30e-382e-4530-95ea-1590dbe7d7d1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:15:57 crc kubenswrapper[4576]: I1203 09:15:57.681003 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2cdcf30e-382e-4530-95ea-1590dbe7d7d1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2cdcf30e-382e-4530-95ea-1590dbe7d7d1" (UID: "2cdcf30e-382e-4530-95ea-1590dbe7d7d1"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:15:57 crc kubenswrapper[4576]: I1203 09:15:57.741002 4576 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cdcf30e-382e-4530-95ea-1590dbe7d7d1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:57 crc kubenswrapper[4576]: I1203 09:15:57.741057 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5nc4t\" (UniqueName: \"kubernetes.io/projected/2cdcf30e-382e-4530-95ea-1590dbe7d7d1-kube-api-access-5nc4t\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:57 crc kubenswrapper[4576]: I1203 09:15:57.741073 4576 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2cdcf30e-382e-4530-95ea-1590dbe7d7d1-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:57 crc kubenswrapper[4576]: I1203 09:15:57.741085 4576 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2cdcf30e-382e-4530-95ea-1590dbe7d7d1-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:58 crc kubenswrapper[4576]: I1203 09:15:58.076229 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-wgjr9" event={"ID":"2cdcf30e-382e-4530-95ea-1590dbe7d7d1","Type":"ContainerDied","Data":"464ff9d86742a960638fa858120ff55241471667961484ffc5c76016772b03ac"} Dec 03 09:15:58 crc kubenswrapper[4576]: I1203 09:15:58.076626 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="464ff9d86742a960638fa858120ff55241471667961484ffc5c76016772b03ac" Dec 03 09:15:58 crc kubenswrapper[4576]: I1203 09:15:58.076472 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-wgjr9" Dec 03 09:15:58 crc kubenswrapper[4576]: I1203 09:15:58.233196 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 09:15:58 crc kubenswrapper[4576]: I1203 09:15:58.233507 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="59e9e04f-48ad-432a-bef6-16751a2eaf90" containerName="nova-scheduler-scheduler" containerID="cri-o://544f1a202da33a0848881e8d66e3794a51977a210446fc41d52de7bd059d711a" gracePeriod=30 Dec 03 09:15:58 crc kubenswrapper[4576]: I1203 09:15:58.247012 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 03 09:15:58 crc kubenswrapper[4576]: I1203 09:15:58.247073 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 09:15:58 crc kubenswrapper[4576]: I1203 09:15:58.247272 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="54731e6f-6a04-4526-8031-76ee51a20ea5" containerName="nova-api-log" containerID="cri-o://d7a6bb829d17d5bf5886bd5621f72f0969afb6812e58981e180186a32b895c6f" gracePeriod=30 Dec 03 09:15:58 crc kubenswrapper[4576]: I1203 09:15:58.247383 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 09:15:58 crc kubenswrapper[4576]: I1203 09:15:58.247396 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="54731e6f-6a04-4526-8031-76ee51a20ea5" containerName="nova-api-api" containerID="cri-o://79b1a94308d6acf4571b8663004a23d399f8e722c4a22634e04160763d4d93af" gracePeriod=30 Dec 03 09:15:58 crc kubenswrapper[4576]: 
I1203 09:15:58.255921 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="54731e6f-6a04-4526-8031-76ee51a20ea5" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.203:8774/\": EOF" Dec 03 09:15:58 crc kubenswrapper[4576]: I1203 09:15:58.257336 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="54731e6f-6a04-4526-8031-76ee51a20ea5" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.203:8774/\": EOF" Dec 03 09:15:58 crc kubenswrapper[4576]: I1203 09:15:58.272499 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 09:15:58 crc kubenswrapper[4576]: I1203 09:15:58.272933 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="10c796b7-b92d-4512-828a-59e082e97b16" containerName="nova-metadata-log" containerID="cri-o://1bf4b2e6b08659aa36e2bf65a4d956c7def3d4eb1564917bce5524ac07e2d8b8" gracePeriod=30 Dec 03 09:15:58 crc kubenswrapper[4576]: I1203 09:15:58.273285 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="10c796b7-b92d-4512-828a-59e082e97b16" containerName="nova-metadata-metadata" containerID="cri-o://bddcfbfe3a0e81201ff2923414be5f731778142cbef05e14dfd9ebc926b29788" gracePeriod=30 Dec 03 09:15:58 crc kubenswrapper[4576]: I1203 09:15:58.984006 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.087052 4576 generic.go:334] "Generic (PLEG): container finished" podID="10c796b7-b92d-4512-828a-59e082e97b16" containerID="1bf4b2e6b08659aa36e2bf65a4d956c7def3d4eb1564917bce5524ac07e2d8b8" exitCode=143 Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.087137 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"10c796b7-b92d-4512-828a-59e082e97b16","Type":"ContainerDied","Data":"1bf4b2e6b08659aa36e2bf65a4d956c7def3d4eb1564917bce5524ac07e2d8b8"} Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.089920 4576 generic.go:334] "Generic (PLEG): container finished" podID="54731e6f-6a04-4526-8031-76ee51a20ea5" containerID="79b1a94308d6acf4571b8663004a23d399f8e722c4a22634e04160763d4d93af" exitCode=0 Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.089958 4576 generic.go:334] "Generic (PLEG): container finished" podID="54731e6f-6a04-4526-8031-76ee51a20ea5" containerID="d7a6bb829d17d5bf5886bd5621f72f0969afb6812e58981e180186a32b895c6f" exitCode=143 Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.089968 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"54731e6f-6a04-4526-8031-76ee51a20ea5","Type":"ContainerDied","Data":"79b1a94308d6acf4571b8663004a23d399f8e722c4a22634e04160763d4d93af"} Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.089995 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.090005 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"54731e6f-6a04-4526-8031-76ee51a20ea5","Type":"ContainerDied","Data":"d7a6bb829d17d5bf5886bd5621f72f0969afb6812e58981e180186a32b895c6f"} Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.090017 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"54731e6f-6a04-4526-8031-76ee51a20ea5","Type":"ContainerDied","Data":"4869b3bf3e86e1f70bea21df5a40f87ecdf298fa41bbf9e3a865feb49e6b478a"} Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.090034 4576 scope.go:117] "RemoveContainer" containerID="79b1a94308d6acf4571b8663004a23d399f8e722c4a22634e04160763d4d93af" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.121324 4576 scope.go:117] "RemoveContainer" containerID="d7a6bb829d17d5bf5886bd5621f72f0969afb6812e58981e180186a32b895c6f" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.143808 4576 scope.go:117] "RemoveContainer" containerID="79b1a94308d6acf4571b8663004a23d399f8e722c4a22634e04160763d4d93af" Dec 03 09:15:59 crc kubenswrapper[4576]: E1203 09:15:59.144220 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"79b1a94308d6acf4571b8663004a23d399f8e722c4a22634e04160763d4d93af\": container with ID starting with 79b1a94308d6acf4571b8663004a23d399f8e722c4a22634e04160763d4d93af not found: ID does not exist" containerID="79b1a94308d6acf4571b8663004a23d399f8e722c4a22634e04160763d4d93af" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.144252 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79b1a94308d6acf4571b8663004a23d399f8e722c4a22634e04160763d4d93af"} err="failed to get container status \"79b1a94308d6acf4571b8663004a23d399f8e722c4a22634e04160763d4d93af\": rpc error: code = NotFound desc = could not find container \"79b1a94308d6acf4571b8663004a23d399f8e722c4a22634e04160763d4d93af\": container with ID starting with 79b1a94308d6acf4571b8663004a23d399f8e722c4a22634e04160763d4d93af not found: ID does not exist" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.144272 4576 scope.go:117] "RemoveContainer" containerID="d7a6bb829d17d5bf5886bd5621f72f0969afb6812e58981e180186a32b895c6f" Dec 03 09:15:59 crc kubenswrapper[4576]: E1203 09:15:59.144511 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d7a6bb829d17d5bf5886bd5621f72f0969afb6812e58981e180186a32b895c6f\": container with ID starting with d7a6bb829d17d5bf5886bd5621f72f0969afb6812e58981e180186a32b895c6f not found: ID does not exist" containerID="d7a6bb829d17d5bf5886bd5621f72f0969afb6812e58981e180186a32b895c6f" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.144564 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7a6bb829d17d5bf5886bd5621f72f0969afb6812e58981e180186a32b895c6f"} err="failed to get container status \"d7a6bb829d17d5bf5886bd5621f72f0969afb6812e58981e180186a32b895c6f\": rpc error: code = NotFound desc = could not find container \"d7a6bb829d17d5bf5886bd5621f72f0969afb6812e58981e180186a32b895c6f\": container with ID starting with d7a6bb829d17d5bf5886bd5621f72f0969afb6812e58981e180186a32b895c6f not found: ID does not exist" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.144583 4576 scope.go:117] 
"RemoveContainer" containerID="79b1a94308d6acf4571b8663004a23d399f8e722c4a22634e04160763d4d93af" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.145025 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79b1a94308d6acf4571b8663004a23d399f8e722c4a22634e04160763d4d93af"} err="failed to get container status \"79b1a94308d6acf4571b8663004a23d399f8e722c4a22634e04160763d4d93af\": rpc error: code = NotFound desc = could not find container \"79b1a94308d6acf4571b8663004a23d399f8e722c4a22634e04160763d4d93af\": container with ID starting with 79b1a94308d6acf4571b8663004a23d399f8e722c4a22634e04160763d4d93af not found: ID does not exist" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.145044 4576 scope.go:117] "RemoveContainer" containerID="d7a6bb829d17d5bf5886bd5621f72f0969afb6812e58981e180186a32b895c6f" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.145401 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7a6bb829d17d5bf5886bd5621f72f0969afb6812e58981e180186a32b895c6f"} err="failed to get container status \"d7a6bb829d17d5bf5886bd5621f72f0969afb6812e58981e180186a32b895c6f\": rpc error: code = NotFound desc = could not find container \"d7a6bb829d17d5bf5886bd5621f72f0969afb6812e58981e180186a32b895c6f\": container with ID starting with d7a6bb829d17d5bf5886bd5621f72f0969afb6812e58981e180186a32b895c6f not found: ID does not exist" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.180203 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kxlqw\" (UniqueName: \"kubernetes.io/projected/54731e6f-6a04-4526-8031-76ee51a20ea5-kube-api-access-kxlqw\") pod \"54731e6f-6a04-4526-8031-76ee51a20ea5\" (UID: \"54731e6f-6a04-4526-8031-76ee51a20ea5\") " Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.180275 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/54731e6f-6a04-4526-8031-76ee51a20ea5-public-tls-certs\") pod \"54731e6f-6a04-4526-8031-76ee51a20ea5\" (UID: \"54731e6f-6a04-4526-8031-76ee51a20ea5\") " Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.180308 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54731e6f-6a04-4526-8031-76ee51a20ea5-logs\") pod \"54731e6f-6a04-4526-8031-76ee51a20ea5\" (UID: \"54731e6f-6a04-4526-8031-76ee51a20ea5\") " Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.180395 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54731e6f-6a04-4526-8031-76ee51a20ea5-combined-ca-bundle\") pod \"54731e6f-6a04-4526-8031-76ee51a20ea5\" (UID: \"54731e6f-6a04-4526-8031-76ee51a20ea5\") " Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.180414 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/54731e6f-6a04-4526-8031-76ee51a20ea5-internal-tls-certs\") pod \"54731e6f-6a04-4526-8031-76ee51a20ea5\" (UID: \"54731e6f-6a04-4526-8031-76ee51a20ea5\") " Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.180433 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54731e6f-6a04-4526-8031-76ee51a20ea5-config-data\") pod \"54731e6f-6a04-4526-8031-76ee51a20ea5\" (UID: 
\"54731e6f-6a04-4526-8031-76ee51a20ea5\") " Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.181314 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54731e6f-6a04-4526-8031-76ee51a20ea5-logs" (OuterVolumeSpecName: "logs") pod "54731e6f-6a04-4526-8031-76ee51a20ea5" (UID: "54731e6f-6a04-4526-8031-76ee51a20ea5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.185672 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54731e6f-6a04-4526-8031-76ee51a20ea5-kube-api-access-kxlqw" (OuterVolumeSpecName: "kube-api-access-kxlqw") pod "54731e6f-6a04-4526-8031-76ee51a20ea5" (UID: "54731e6f-6a04-4526-8031-76ee51a20ea5"). InnerVolumeSpecName "kube-api-access-kxlqw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.217075 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54731e6f-6a04-4526-8031-76ee51a20ea5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "54731e6f-6a04-4526-8031-76ee51a20ea5" (UID: "54731e6f-6a04-4526-8031-76ee51a20ea5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.220020 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54731e6f-6a04-4526-8031-76ee51a20ea5-config-data" (OuterVolumeSpecName: "config-data") pod "54731e6f-6a04-4526-8031-76ee51a20ea5" (UID: "54731e6f-6a04-4526-8031-76ee51a20ea5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.255518 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54731e6f-6a04-4526-8031-76ee51a20ea5-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "54731e6f-6a04-4526-8031-76ee51a20ea5" (UID: "54731e6f-6a04-4526-8031-76ee51a20ea5"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.255711 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54731e6f-6a04-4526-8031-76ee51a20ea5-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "54731e6f-6a04-4526-8031-76ee51a20ea5" (UID: "54731e6f-6a04-4526-8031-76ee51a20ea5"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.283003 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kxlqw\" (UniqueName: \"kubernetes.io/projected/54731e6f-6a04-4526-8031-76ee51a20ea5-kube-api-access-kxlqw\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.283043 4576 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/54731e6f-6a04-4526-8031-76ee51a20ea5-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.283055 4576 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54731e6f-6a04-4526-8031-76ee51a20ea5-logs\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.283067 4576 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54731e6f-6a04-4526-8031-76ee51a20ea5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.283078 4576 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/54731e6f-6a04-4526-8031-76ee51a20ea5-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.283089 4576 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54731e6f-6a04-4526-8031-76ee51a20ea5-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.446316 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.455074 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.469852 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 03 09:15:59 crc kubenswrapper[4576]: E1203 09:15:59.470259 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b4f90af-69a8-42f0-a715-ea40baaec5c3" containerName="init" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.470278 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b4f90af-69a8-42f0-a715-ea40baaec5c3" containerName="init" Dec 03 09:15:59 crc kubenswrapper[4576]: E1203 09:15:59.470293 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b4f90af-69a8-42f0-a715-ea40baaec5c3" containerName="dnsmasq-dns" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.470299 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b4f90af-69a8-42f0-a715-ea40baaec5c3" containerName="dnsmasq-dns" Dec 03 09:15:59 crc kubenswrapper[4576]: E1203 09:15:59.470315 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2cdcf30e-382e-4530-95ea-1590dbe7d7d1" containerName="nova-manage" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.470321 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="2cdcf30e-382e-4530-95ea-1590dbe7d7d1" containerName="nova-manage" Dec 03 09:15:59 crc kubenswrapper[4576]: E1203 09:15:59.470328 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54731e6f-6a04-4526-8031-76ee51a20ea5" containerName="nova-api-log" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.470333 4576 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="54731e6f-6a04-4526-8031-76ee51a20ea5" containerName="nova-api-log" Dec 03 09:15:59 crc kubenswrapper[4576]: E1203 09:15:59.470351 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54731e6f-6a04-4526-8031-76ee51a20ea5" containerName="nova-api-api" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.470357 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="54731e6f-6a04-4526-8031-76ee51a20ea5" containerName="nova-api-api" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.470542 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="54731e6f-6a04-4526-8031-76ee51a20ea5" containerName="nova-api-api" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.470556 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b4f90af-69a8-42f0-a715-ea40baaec5c3" containerName="dnsmasq-dns" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.470567 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="54731e6f-6a04-4526-8031-76ee51a20ea5" containerName="nova-api-log" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.470580 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="2cdcf30e-382e-4530-95ea-1590dbe7d7d1" containerName="nova-manage" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.471493 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.474203 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.474203 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.474905 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.487457 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhwss\" (UniqueName: \"kubernetes.io/projected/b02586c5-f716-48ea-bc4a-fefa33df684e-kube-api-access-fhwss\") pod \"nova-api-0\" (UID: \"b02586c5-f716-48ea-bc4a-fefa33df684e\") " pod="openstack/nova-api-0" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.487550 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b02586c5-f716-48ea-bc4a-fefa33df684e-logs\") pod \"nova-api-0\" (UID: \"b02586c5-f716-48ea-bc4a-fefa33df684e\") " pod="openstack/nova-api-0" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.487584 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b02586c5-f716-48ea-bc4a-fefa33df684e-public-tls-certs\") pod \"nova-api-0\" (UID: \"b02586c5-f716-48ea-bc4a-fefa33df684e\") " pod="openstack/nova-api-0" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.487602 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b02586c5-f716-48ea-bc4a-fefa33df684e-internal-tls-certs\") pod \"nova-api-0\" (UID: \"b02586c5-f716-48ea-bc4a-fefa33df684e\") " pod="openstack/nova-api-0" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.487629 4576 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b02586c5-f716-48ea-bc4a-fefa33df684e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b02586c5-f716-48ea-bc4a-fefa33df684e\") " pod="openstack/nova-api-0" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.487666 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b02586c5-f716-48ea-bc4a-fefa33df684e-config-data\") pod \"nova-api-0\" (UID: \"b02586c5-f716-48ea-bc4a-fefa33df684e\") " pod="openstack/nova-api-0" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.498357 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.589094 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhwss\" (UniqueName: \"kubernetes.io/projected/b02586c5-f716-48ea-bc4a-fefa33df684e-kube-api-access-fhwss\") pod \"nova-api-0\" (UID: \"b02586c5-f716-48ea-bc4a-fefa33df684e\") " pod="openstack/nova-api-0" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.589186 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b02586c5-f716-48ea-bc4a-fefa33df684e-logs\") pod \"nova-api-0\" (UID: \"b02586c5-f716-48ea-bc4a-fefa33df684e\") " pod="openstack/nova-api-0" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.589222 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b02586c5-f716-48ea-bc4a-fefa33df684e-public-tls-certs\") pod \"nova-api-0\" (UID: \"b02586c5-f716-48ea-bc4a-fefa33df684e\") " pod="openstack/nova-api-0" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.589242 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b02586c5-f716-48ea-bc4a-fefa33df684e-internal-tls-certs\") pod \"nova-api-0\" (UID: \"b02586c5-f716-48ea-bc4a-fefa33df684e\") " pod="openstack/nova-api-0" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.589284 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b02586c5-f716-48ea-bc4a-fefa33df684e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b02586c5-f716-48ea-bc4a-fefa33df684e\") " pod="openstack/nova-api-0" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.589327 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b02586c5-f716-48ea-bc4a-fefa33df684e-config-data\") pod \"nova-api-0\" (UID: \"b02586c5-f716-48ea-bc4a-fefa33df684e\") " pod="openstack/nova-api-0" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.589660 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b02586c5-f716-48ea-bc4a-fefa33df684e-logs\") pod \"nova-api-0\" (UID: \"b02586c5-f716-48ea-bc4a-fefa33df684e\") " pod="openstack/nova-api-0" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.593181 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b02586c5-f716-48ea-bc4a-fefa33df684e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b02586c5-f716-48ea-bc4a-fefa33df684e\") " 
pod="openstack/nova-api-0" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.599918 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b02586c5-f716-48ea-bc4a-fefa33df684e-config-data\") pod \"nova-api-0\" (UID: \"b02586c5-f716-48ea-bc4a-fefa33df684e\") " pod="openstack/nova-api-0" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.602464 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b02586c5-f716-48ea-bc4a-fefa33df684e-public-tls-certs\") pod \"nova-api-0\" (UID: \"b02586c5-f716-48ea-bc4a-fefa33df684e\") " pod="openstack/nova-api-0" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.604261 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b02586c5-f716-48ea-bc4a-fefa33df684e-internal-tls-certs\") pod \"nova-api-0\" (UID: \"b02586c5-f716-48ea-bc4a-fefa33df684e\") " pod="openstack/nova-api-0" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.605135 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhwss\" (UniqueName: \"kubernetes.io/projected/b02586c5-f716-48ea-bc4a-fefa33df684e-kube-api-access-fhwss\") pod \"nova-api-0\" (UID: \"b02586c5-f716-48ea-bc4a-fefa33df684e\") " pod="openstack/nova-api-0" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.688500 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54731e6f-6a04-4526-8031-76ee51a20ea5" path="/var/lib/kubelet/pods/54731e6f-6a04-4526-8031-76ee51a20ea5/volumes" Dec 03 09:15:59 crc kubenswrapper[4576]: I1203 09:15:59.785581 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 09:16:00 crc kubenswrapper[4576]: I1203 09:16:00.102113 4576 generic.go:334] "Generic (PLEG): container finished" podID="59e9e04f-48ad-432a-bef6-16751a2eaf90" containerID="544f1a202da33a0848881e8d66e3794a51977a210446fc41d52de7bd059d711a" exitCode=0 Dec 03 09:16:00 crc kubenswrapper[4576]: I1203 09:16:00.102479 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"59e9e04f-48ad-432a-bef6-16751a2eaf90","Type":"ContainerDied","Data":"544f1a202da33a0848881e8d66e3794a51977a210446fc41d52de7bd059d711a"} Dec 03 09:16:00 crc kubenswrapper[4576]: I1203 09:16:00.254988 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 09:16:00 crc kubenswrapper[4576]: I1203 09:16:00.255273 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 09:16:00 crc kubenswrapper[4576]: W1203 09:16:00.258701 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb02586c5_f716_48ea_bc4a_fefa33df684e.slice/crio-8323fb1e8c154c9faeed9b38a5e6a0102245dae1b2f114135700a4199fed96b5 WatchSource:0}: Error finding container 8323fb1e8c154c9faeed9b38a5e6a0102245dae1b2f114135700a4199fed96b5: Status 404 returned error can't find the container with id 8323fb1e8c154c9faeed9b38a5e6a0102245dae1b2f114135700a4199fed96b5 Dec 03 09:16:00 crc kubenswrapper[4576]: I1203 09:16:00.407691 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59e9e04f-48ad-432a-bef6-16751a2eaf90-config-data\") pod \"59e9e04f-48ad-432a-bef6-16751a2eaf90\" (UID: \"59e9e04f-48ad-432a-bef6-16751a2eaf90\") " Dec 03 09:16:00 crc kubenswrapper[4576]: I1203 09:16:00.407872 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7gbg2\" (UniqueName: \"kubernetes.io/projected/59e9e04f-48ad-432a-bef6-16751a2eaf90-kube-api-access-7gbg2\") pod \"59e9e04f-48ad-432a-bef6-16751a2eaf90\" (UID: \"59e9e04f-48ad-432a-bef6-16751a2eaf90\") " Dec 03 09:16:00 crc kubenswrapper[4576]: I1203 09:16:00.408038 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59e9e04f-48ad-432a-bef6-16751a2eaf90-combined-ca-bundle\") pod \"59e9e04f-48ad-432a-bef6-16751a2eaf90\" (UID: \"59e9e04f-48ad-432a-bef6-16751a2eaf90\") " Dec 03 09:16:00 crc kubenswrapper[4576]: I1203 09:16:00.416632 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59e9e04f-48ad-432a-bef6-16751a2eaf90-kube-api-access-7gbg2" (OuterVolumeSpecName: "kube-api-access-7gbg2") pod "59e9e04f-48ad-432a-bef6-16751a2eaf90" (UID: "59e9e04f-48ad-432a-bef6-16751a2eaf90"). InnerVolumeSpecName "kube-api-access-7gbg2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:16:00 crc kubenswrapper[4576]: I1203 09:16:00.447105 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59e9e04f-48ad-432a-bef6-16751a2eaf90-config-data" (OuterVolumeSpecName: "config-data") pod "59e9e04f-48ad-432a-bef6-16751a2eaf90" (UID: "59e9e04f-48ad-432a-bef6-16751a2eaf90"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:16:00 crc kubenswrapper[4576]: I1203 09:16:00.450949 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59e9e04f-48ad-432a-bef6-16751a2eaf90-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "59e9e04f-48ad-432a-bef6-16751a2eaf90" (UID: "59e9e04f-48ad-432a-bef6-16751a2eaf90"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:16:00 crc kubenswrapper[4576]: I1203 09:16:00.509957 4576 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59e9e04f-48ad-432a-bef6-16751a2eaf90-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 09:16:00 crc kubenswrapper[4576]: I1203 09:16:00.510148 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7gbg2\" (UniqueName: \"kubernetes.io/projected/59e9e04f-48ad-432a-bef6-16751a2eaf90-kube-api-access-7gbg2\") on node \"crc\" DevicePath \"\"" Dec 03 09:16:00 crc kubenswrapper[4576]: I1203 09:16:00.510205 4576 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59e9e04f-48ad-432a-bef6-16751a2eaf90-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:16:01 crc kubenswrapper[4576]: I1203 09:16:01.121895 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 09:16:01 crc kubenswrapper[4576]: I1203 09:16:01.122630 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"59e9e04f-48ad-432a-bef6-16751a2eaf90","Type":"ContainerDied","Data":"4de2775d640469d6b6f4eacd2e55a347fa820c2bd960c92b643e97681fd662cf"} Dec 03 09:16:01 crc kubenswrapper[4576]: I1203 09:16:01.122691 4576 scope.go:117] "RemoveContainer" containerID="544f1a202da33a0848881e8d66e3794a51977a210446fc41d52de7bd059d711a" Dec 03 09:16:01 crc kubenswrapper[4576]: I1203 09:16:01.131015 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b02586c5-f716-48ea-bc4a-fefa33df684e","Type":"ContainerStarted","Data":"abcac642f0db0097a3714cddf5a89d7337103c025566e54125e8d9253de01673"} Dec 03 09:16:01 crc kubenswrapper[4576]: I1203 09:16:01.131065 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b02586c5-f716-48ea-bc4a-fefa33df684e","Type":"ContainerStarted","Data":"665ea0512efa2f6b528b5998b1159e18b1e882ea064d84cb224244efd615ee5c"} Dec 03 09:16:01 crc kubenswrapper[4576]: I1203 09:16:01.131077 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b02586c5-f716-48ea-bc4a-fefa33df684e","Type":"ContainerStarted","Data":"8323fb1e8c154c9faeed9b38a5e6a0102245dae1b2f114135700a4199fed96b5"} Dec 03 09:16:01 crc kubenswrapper[4576]: I1203 09:16:01.177707 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.177683732 podStartE2EDuration="2.177683732s" podCreationTimestamp="2025-12-03 09:15:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:16:01.170597589 +0000 UTC m=+2168.556574573" watchObservedRunningTime="2025-12-03 09:16:01.177683732 +0000 UTC m=+2168.563660726" Dec 03 09:16:01 crc kubenswrapper[4576]: I1203 09:16:01.200047 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 09:16:01 crc kubenswrapper[4576]: I1203 09:16:01.213276 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 09:16:01 crc kubenswrapper[4576]: I1203 09:16:01.225375 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 09:16:01 crc kubenswrapper[4576]: E1203 09:16:01.225965 4576 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="59e9e04f-48ad-432a-bef6-16751a2eaf90" containerName="nova-scheduler-scheduler" Dec 03 09:16:01 crc kubenswrapper[4576]: I1203 09:16:01.225991 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="59e9e04f-48ad-432a-bef6-16751a2eaf90" containerName="nova-scheduler-scheduler" Dec 03 09:16:01 crc kubenswrapper[4576]: I1203 09:16:01.226235 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="59e9e04f-48ad-432a-bef6-16751a2eaf90" containerName="nova-scheduler-scheduler" Dec 03 09:16:01 crc kubenswrapper[4576]: I1203 09:16:01.227098 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 09:16:01 crc kubenswrapper[4576]: I1203 09:16:01.230966 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 03 09:16:01 crc kubenswrapper[4576]: I1203 09:16:01.251455 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 09:16:01 crc kubenswrapper[4576]: I1203 09:16:01.325422 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60431290-c470-429d-938a-588668bb2887-config-data\") pod \"nova-scheduler-0\" (UID: \"60431290-c470-429d-938a-588668bb2887\") " pod="openstack/nova-scheduler-0" Dec 03 09:16:01 crc kubenswrapper[4576]: I1203 09:16:01.325469 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60431290-c470-429d-938a-588668bb2887-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"60431290-c470-429d-938a-588668bb2887\") " pod="openstack/nova-scheduler-0" Dec 03 09:16:01 crc kubenswrapper[4576]: I1203 09:16:01.325626 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jwxts\" (UniqueName: \"kubernetes.io/projected/60431290-c470-429d-938a-588668bb2887-kube-api-access-jwxts\") pod \"nova-scheduler-0\" (UID: \"60431290-c470-429d-938a-588668bb2887\") " pod="openstack/nova-scheduler-0" Dec 03 09:16:01 crc kubenswrapper[4576]: I1203 09:16:01.427696 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60431290-c470-429d-938a-588668bb2887-config-data\") pod \"nova-scheduler-0\" (UID: \"60431290-c470-429d-938a-588668bb2887\") " pod="openstack/nova-scheduler-0" Dec 03 09:16:01 crc kubenswrapper[4576]: I1203 09:16:01.427762 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60431290-c470-429d-938a-588668bb2887-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"60431290-c470-429d-938a-588668bb2887\") " pod="openstack/nova-scheduler-0" Dec 03 09:16:01 crc kubenswrapper[4576]: I1203 09:16:01.427847 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jwxts\" (UniqueName: \"kubernetes.io/projected/60431290-c470-429d-938a-588668bb2887-kube-api-access-jwxts\") pod \"nova-scheduler-0\" (UID: \"60431290-c470-429d-938a-588668bb2887\") " pod="openstack/nova-scheduler-0" Dec 03 09:16:01 crc kubenswrapper[4576]: I1203 09:16:01.433343 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60431290-c470-429d-938a-588668bb2887-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: 
\"60431290-c470-429d-938a-588668bb2887\") " pod="openstack/nova-scheduler-0" Dec 03 09:16:01 crc kubenswrapper[4576]: I1203 09:16:01.436099 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60431290-c470-429d-938a-588668bb2887-config-data\") pod \"nova-scheduler-0\" (UID: \"60431290-c470-429d-938a-588668bb2887\") " pod="openstack/nova-scheduler-0" Dec 03 09:16:01 crc kubenswrapper[4576]: I1203 09:16:01.447740 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jwxts\" (UniqueName: \"kubernetes.io/projected/60431290-c470-429d-938a-588668bb2887-kube-api-access-jwxts\") pod \"nova-scheduler-0\" (UID: \"60431290-c470-429d-938a-588668bb2887\") " pod="openstack/nova-scheduler-0" Dec 03 09:16:01 crc kubenswrapper[4576]: I1203 09:16:01.584900 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 09:16:01 crc kubenswrapper[4576]: I1203 09:16:01.693718 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="59e9e04f-48ad-432a-bef6-16751a2eaf90" path="/var/lib/kubelet/pods/59e9e04f-48ad-432a-bef6-16751a2eaf90/volumes" Dec 03 09:16:01 crc kubenswrapper[4576]: I1203 09:16:01.824811 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 09:16:01 crc kubenswrapper[4576]: I1203 09:16:01.936135 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4ptl4\" (UniqueName: \"kubernetes.io/projected/10c796b7-b92d-4512-828a-59e082e97b16-kube-api-access-4ptl4\") pod \"10c796b7-b92d-4512-828a-59e082e97b16\" (UID: \"10c796b7-b92d-4512-828a-59e082e97b16\") " Dec 03 09:16:01 crc kubenswrapper[4576]: I1203 09:16:01.936190 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/10c796b7-b92d-4512-828a-59e082e97b16-logs\") pod \"10c796b7-b92d-4512-828a-59e082e97b16\" (UID: \"10c796b7-b92d-4512-828a-59e082e97b16\") " Dec 03 09:16:01 crc kubenswrapper[4576]: I1203 09:16:01.936229 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/10c796b7-b92d-4512-828a-59e082e97b16-nova-metadata-tls-certs\") pod \"10c796b7-b92d-4512-828a-59e082e97b16\" (UID: \"10c796b7-b92d-4512-828a-59e082e97b16\") " Dec 03 09:16:01 crc kubenswrapper[4576]: I1203 09:16:01.936314 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10c796b7-b92d-4512-828a-59e082e97b16-combined-ca-bundle\") pod \"10c796b7-b92d-4512-828a-59e082e97b16\" (UID: \"10c796b7-b92d-4512-828a-59e082e97b16\") " Dec 03 09:16:01 crc kubenswrapper[4576]: I1203 09:16:01.936349 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10c796b7-b92d-4512-828a-59e082e97b16-config-data\") pod \"10c796b7-b92d-4512-828a-59e082e97b16\" (UID: \"10c796b7-b92d-4512-828a-59e082e97b16\") " Dec 03 09:16:01 crc kubenswrapper[4576]: I1203 09:16:01.942679 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/10c796b7-b92d-4512-828a-59e082e97b16-logs" (OuterVolumeSpecName: "logs") pod "10c796b7-b92d-4512-828a-59e082e97b16" (UID: "10c796b7-b92d-4512-828a-59e082e97b16"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:16:01 crc kubenswrapper[4576]: I1203 09:16:01.949989 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10c796b7-b92d-4512-828a-59e082e97b16-kube-api-access-4ptl4" (OuterVolumeSpecName: "kube-api-access-4ptl4") pod "10c796b7-b92d-4512-828a-59e082e97b16" (UID: "10c796b7-b92d-4512-828a-59e082e97b16"). InnerVolumeSpecName "kube-api-access-4ptl4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:16:01 crc kubenswrapper[4576]: I1203 09:16:01.972465 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10c796b7-b92d-4512-828a-59e082e97b16-config-data" (OuterVolumeSpecName: "config-data") pod "10c796b7-b92d-4512-828a-59e082e97b16" (UID: "10c796b7-b92d-4512-828a-59e082e97b16"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:16:01 crc kubenswrapper[4576]: I1203 09:16:01.986118 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10c796b7-b92d-4512-828a-59e082e97b16-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "10c796b7-b92d-4512-828a-59e082e97b16" (UID: "10c796b7-b92d-4512-828a-59e082e97b16"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.006689 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10c796b7-b92d-4512-828a-59e082e97b16-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "10c796b7-b92d-4512-828a-59e082e97b16" (UID: "10c796b7-b92d-4512-828a-59e082e97b16"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.039799 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4ptl4\" (UniqueName: \"kubernetes.io/projected/10c796b7-b92d-4512-828a-59e082e97b16-kube-api-access-4ptl4\") on node \"crc\" DevicePath \"\"" Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.039834 4576 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/10c796b7-b92d-4512-828a-59e082e97b16-logs\") on node \"crc\" DevicePath \"\"" Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.039845 4576 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/10c796b7-b92d-4512-828a-59e082e97b16-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.039861 4576 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10c796b7-b92d-4512-828a-59e082e97b16-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.039869 4576 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10c796b7-b92d-4512-828a-59e082e97b16-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.117409 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 09:16:02 crc kubenswrapper[4576]: W1203 09:16:02.120992 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod60431290_c470_429d_938a_588668bb2887.slice/crio-97a980233e039d3a309c998d5c347ef28e6e1518889331c0ba0a014ba1acfa4d WatchSource:0}: Error finding container 97a980233e039d3a309c998d5c347ef28e6e1518889331c0ba0a014ba1acfa4d: Status 404 returned error can't find the container with id 97a980233e039d3a309c998d5c347ef28e6e1518889331c0ba0a014ba1acfa4d Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.145722 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"60431290-c470-429d-938a-588668bb2887","Type":"ContainerStarted","Data":"97a980233e039d3a309c998d5c347ef28e6e1518889331c0ba0a014ba1acfa4d"} Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.150730 4576 generic.go:334] "Generic (PLEG): container finished" podID="10c796b7-b92d-4512-828a-59e082e97b16" containerID="bddcfbfe3a0e81201ff2923414be5f731778142cbef05e14dfd9ebc926b29788" exitCode=0 Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.150799 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"10c796b7-b92d-4512-828a-59e082e97b16","Type":"ContainerDied","Data":"bddcfbfe3a0e81201ff2923414be5f731778142cbef05e14dfd9ebc926b29788"} Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.150828 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"10c796b7-b92d-4512-828a-59e082e97b16","Type":"ContainerDied","Data":"1397c7b38bce2c35a178c42295b9a818cd643fd8ee0e822a05368a582fc272ca"} Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.150847 4576 scope.go:117] "RemoveContainer" containerID="bddcfbfe3a0e81201ff2923414be5f731778142cbef05e14dfd9ebc926b29788" Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.150948 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.197077 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.197119 4576 scope.go:117] "RemoveContainer" containerID="1bf4b2e6b08659aa36e2bf65a4d956c7def3d4eb1564917bce5524ac07e2d8b8" Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.215379 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.223677 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 03 09:16:02 crc kubenswrapper[4576]: E1203 09:16:02.224120 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10c796b7-b92d-4512-828a-59e082e97b16" containerName="nova-metadata-metadata" Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.224140 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="10c796b7-b92d-4512-828a-59e082e97b16" containerName="nova-metadata-metadata" Dec 03 09:16:02 crc kubenswrapper[4576]: E1203 09:16:02.224167 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10c796b7-b92d-4512-828a-59e082e97b16" containerName="nova-metadata-log" Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.224174 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="10c796b7-b92d-4512-828a-59e082e97b16" containerName="nova-metadata-log" Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.224391 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="10c796b7-b92d-4512-828a-59e082e97b16" containerName="nova-metadata-log" Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.224416 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="10c796b7-b92d-4512-828a-59e082e97b16" containerName="nova-metadata-metadata" Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.226856 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.229465 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.229680 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.237613 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.268776 4576 scope.go:117] "RemoveContainer" containerID="bddcfbfe3a0e81201ff2923414be5f731778142cbef05e14dfd9ebc926b29788" Dec 03 09:16:02 crc kubenswrapper[4576]: E1203 09:16:02.269584 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bddcfbfe3a0e81201ff2923414be5f731778142cbef05e14dfd9ebc926b29788\": container with ID starting with bddcfbfe3a0e81201ff2923414be5f731778142cbef05e14dfd9ebc926b29788 not found: ID does not exist" containerID="bddcfbfe3a0e81201ff2923414be5f731778142cbef05e14dfd9ebc926b29788" Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.269726 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bddcfbfe3a0e81201ff2923414be5f731778142cbef05e14dfd9ebc926b29788"} err="failed to get container status \"bddcfbfe3a0e81201ff2923414be5f731778142cbef05e14dfd9ebc926b29788\": rpc error: code = NotFound desc = could not find container \"bddcfbfe3a0e81201ff2923414be5f731778142cbef05e14dfd9ebc926b29788\": container with ID starting with bddcfbfe3a0e81201ff2923414be5f731778142cbef05e14dfd9ebc926b29788 not found: ID does not exist" Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.269747 4576 scope.go:117] "RemoveContainer" containerID="1bf4b2e6b08659aa36e2bf65a4d956c7def3d4eb1564917bce5524ac07e2d8b8" Dec 03 09:16:02 crc kubenswrapper[4576]: E1203 09:16:02.273824 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1bf4b2e6b08659aa36e2bf65a4d956c7def3d4eb1564917bce5524ac07e2d8b8\": container with ID starting with 1bf4b2e6b08659aa36e2bf65a4d956c7def3d4eb1564917bce5524ac07e2d8b8 not found: ID does not exist" containerID="1bf4b2e6b08659aa36e2bf65a4d956c7def3d4eb1564917bce5524ac07e2d8b8" Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.273887 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1bf4b2e6b08659aa36e2bf65a4d956c7def3d4eb1564917bce5524ac07e2d8b8"} err="failed to get container status \"1bf4b2e6b08659aa36e2bf65a4d956c7def3d4eb1564917bce5524ac07e2d8b8\": rpc error: code = NotFound desc = could not find container \"1bf4b2e6b08659aa36e2bf65a4d956c7def3d4eb1564917bce5524ac07e2d8b8\": container with ID starting with 1bf4b2e6b08659aa36e2bf65a4d956c7def3d4eb1564917bce5524ac07e2d8b8 not found: ID does not exist" Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.348692 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/8dddc23a-4179-44b0-b145-a91ab3441703-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"8dddc23a-4179-44b0-b145-a91ab3441703\") " pod="openstack/nova-metadata-0" Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.348733 4576 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8dddc23a-4179-44b0-b145-a91ab3441703-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8dddc23a-4179-44b0-b145-a91ab3441703\") " pod="openstack/nova-metadata-0" Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.348764 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rdhq8\" (UniqueName: \"kubernetes.io/projected/8dddc23a-4179-44b0-b145-a91ab3441703-kube-api-access-rdhq8\") pod \"nova-metadata-0\" (UID: \"8dddc23a-4179-44b0-b145-a91ab3441703\") " pod="openstack/nova-metadata-0" Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.349022 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8dddc23a-4179-44b0-b145-a91ab3441703-config-data\") pod \"nova-metadata-0\" (UID: \"8dddc23a-4179-44b0-b145-a91ab3441703\") " pod="openstack/nova-metadata-0" Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.349119 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8dddc23a-4179-44b0-b145-a91ab3441703-logs\") pod \"nova-metadata-0\" (UID: \"8dddc23a-4179-44b0-b145-a91ab3441703\") " pod="openstack/nova-metadata-0" Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.451371 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/8dddc23a-4179-44b0-b145-a91ab3441703-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"8dddc23a-4179-44b0-b145-a91ab3441703\") " pod="openstack/nova-metadata-0" Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.451728 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8dddc23a-4179-44b0-b145-a91ab3441703-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8dddc23a-4179-44b0-b145-a91ab3441703\") " pod="openstack/nova-metadata-0" Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.451759 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdhq8\" (UniqueName: \"kubernetes.io/projected/8dddc23a-4179-44b0-b145-a91ab3441703-kube-api-access-rdhq8\") pod \"nova-metadata-0\" (UID: \"8dddc23a-4179-44b0-b145-a91ab3441703\") " pod="openstack/nova-metadata-0" Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.451821 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8dddc23a-4179-44b0-b145-a91ab3441703-config-data\") pod \"nova-metadata-0\" (UID: \"8dddc23a-4179-44b0-b145-a91ab3441703\") " pod="openstack/nova-metadata-0" Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.451853 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8dddc23a-4179-44b0-b145-a91ab3441703-logs\") pod \"nova-metadata-0\" (UID: \"8dddc23a-4179-44b0-b145-a91ab3441703\") " pod="openstack/nova-metadata-0" Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.453076 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8dddc23a-4179-44b0-b145-a91ab3441703-logs\") pod \"nova-metadata-0\" (UID: \"8dddc23a-4179-44b0-b145-a91ab3441703\") " 
pod="openstack/nova-metadata-0" Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.458608 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/8dddc23a-4179-44b0-b145-a91ab3441703-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"8dddc23a-4179-44b0-b145-a91ab3441703\") " pod="openstack/nova-metadata-0" Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.458672 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8dddc23a-4179-44b0-b145-a91ab3441703-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8dddc23a-4179-44b0-b145-a91ab3441703\") " pod="openstack/nova-metadata-0" Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.473799 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8dddc23a-4179-44b0-b145-a91ab3441703-config-data\") pod \"nova-metadata-0\" (UID: \"8dddc23a-4179-44b0-b145-a91ab3441703\") " pod="openstack/nova-metadata-0" Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.479885 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdhq8\" (UniqueName: \"kubernetes.io/projected/8dddc23a-4179-44b0-b145-a91ab3441703-kube-api-access-rdhq8\") pod \"nova-metadata-0\" (UID: \"8dddc23a-4179-44b0-b145-a91ab3441703\") " pod="openstack/nova-metadata-0" Dec 03 09:16:02 crc kubenswrapper[4576]: I1203 09:16:02.582447 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 09:16:03 crc kubenswrapper[4576]: I1203 09:16:03.040641 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 09:16:03 crc kubenswrapper[4576]: W1203 09:16:03.048970 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8dddc23a_4179_44b0_b145_a91ab3441703.slice/crio-200a80200f49a696239bcd54e90e1c0302b9335623202a68954b2ba5abc43225 WatchSource:0}: Error finding container 200a80200f49a696239bcd54e90e1c0302b9335623202a68954b2ba5abc43225: Status 404 returned error can't find the container with id 200a80200f49a696239bcd54e90e1c0302b9335623202a68954b2ba5abc43225 Dec 03 09:16:03 crc kubenswrapper[4576]: I1203 09:16:03.172755 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"60431290-c470-429d-938a-588668bb2887","Type":"ContainerStarted","Data":"5e5582c3ae6ccb7414b60a7140a29a8844beeec199aa58e5dce12abe88ddface"} Dec 03 09:16:03 crc kubenswrapper[4576]: I1203 09:16:03.175278 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8dddc23a-4179-44b0-b145-a91ab3441703","Type":"ContainerStarted","Data":"200a80200f49a696239bcd54e90e1c0302b9335623202a68954b2ba5abc43225"} Dec 03 09:16:03 crc kubenswrapper[4576]: I1203 09:16:03.194850 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.194832845 podStartE2EDuration="2.194832845s" podCreationTimestamp="2025-12-03 09:16:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:16:03.193049887 +0000 UTC m=+2170.579026871" watchObservedRunningTime="2025-12-03 09:16:03.194832845 +0000 UTC m=+2170.580809829" Dec 03 09:16:03 crc kubenswrapper[4576]: I1203 
09:16:03.690704 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="10c796b7-b92d-4512-828a-59e082e97b16" path="/var/lib/kubelet/pods/10c796b7-b92d-4512-828a-59e082e97b16/volumes" Dec 03 09:16:04 crc kubenswrapper[4576]: I1203 09:16:04.186831 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8dddc23a-4179-44b0-b145-a91ab3441703","Type":"ContainerStarted","Data":"aa07e4dd97029bb134652c47cff41abc4370307ff772091bdb84f29afc42a246"} Dec 03 09:16:04 crc kubenswrapper[4576]: I1203 09:16:04.186878 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8dddc23a-4179-44b0-b145-a91ab3441703","Type":"ContainerStarted","Data":"64b927f00512f7908820c66fe1827410cead4c2c5cb80a3d67f357be331744bf"} Dec 03 09:16:04 crc kubenswrapper[4576]: I1203 09:16:04.252819 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.252791389 podStartE2EDuration="2.252791389s" podCreationTimestamp="2025-12-03 09:16:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:16:04.209026445 +0000 UTC m=+2171.595003439" watchObservedRunningTime="2025-12-03 09:16:04.252791389 +0000 UTC m=+2171.638768393" Dec 03 09:16:06 crc kubenswrapper[4576]: I1203 09:16:06.585961 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 03 09:16:07 crc kubenswrapper[4576]: I1203 09:16:07.583945 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 03 09:16:07 crc kubenswrapper[4576]: I1203 09:16:07.584076 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 03 09:16:09 crc kubenswrapper[4576]: I1203 09:16:09.786663 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 09:16:09 crc kubenswrapper[4576]: I1203 09:16:09.787047 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 09:16:10 crc kubenswrapper[4576]: I1203 09:16:10.802835 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="b02586c5-f716-48ea-bc4a-fefa33df684e" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.205:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 09:16:10 crc kubenswrapper[4576]: I1203 09:16:10.802842 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="b02586c5-f716-48ea-bc4a-fefa33df684e" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.205:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 09:16:11 crc kubenswrapper[4576]: I1203 09:16:11.586576 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 03 09:16:11 crc kubenswrapper[4576]: I1203 09:16:11.634089 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 03 09:16:12 crc kubenswrapper[4576]: I1203 09:16:12.422259 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 03 09:16:12 crc kubenswrapper[4576]: I1203 09:16:12.583553 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openstack/nova-metadata-0" Dec 03 09:16:12 crc kubenswrapper[4576]: I1203 09:16:12.583972 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 03 09:16:13 crc kubenswrapper[4576]: I1203 09:16:13.593711 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="8dddc23a-4179-44b0-b145-a91ab3441703" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.207:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 09:16:13 crc kubenswrapper[4576]: I1203 09:16:13.593711 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="8dddc23a-4179-44b0-b145-a91ab3441703" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.207:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 09:16:17 crc kubenswrapper[4576]: I1203 09:16:17.288150 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 03 09:16:19 crc kubenswrapper[4576]: I1203 09:16:19.593328 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-qn27t"] Dec 03 09:16:19 crc kubenswrapper[4576]: I1203 09:16:19.597026 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qn27t" Dec 03 09:16:19 crc kubenswrapper[4576]: I1203 09:16:19.639195 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qn27t"] Dec 03 09:16:19 crc kubenswrapper[4576]: I1203 09:16:19.741162 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8z8l\" (UniqueName: \"kubernetes.io/projected/c0de047e-34a4-4a3e-aa97-d39bd43517e2-kube-api-access-z8z8l\") pod \"certified-operators-qn27t\" (UID: \"c0de047e-34a4-4a3e-aa97-d39bd43517e2\") " pod="openshift-marketplace/certified-operators-qn27t" Dec 03 09:16:19 crc kubenswrapper[4576]: I1203 09:16:19.741475 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0de047e-34a4-4a3e-aa97-d39bd43517e2-catalog-content\") pod \"certified-operators-qn27t\" (UID: \"c0de047e-34a4-4a3e-aa97-d39bd43517e2\") " pod="openshift-marketplace/certified-operators-qn27t" Dec 03 09:16:19 crc kubenswrapper[4576]: I1203 09:16:19.741605 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0de047e-34a4-4a3e-aa97-d39bd43517e2-utilities\") pod \"certified-operators-qn27t\" (UID: \"c0de047e-34a4-4a3e-aa97-d39bd43517e2\") " pod="openshift-marketplace/certified-operators-qn27t" Dec 03 09:16:19 crc kubenswrapper[4576]: I1203 09:16:19.801122 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 03 09:16:19 crc kubenswrapper[4576]: I1203 09:16:19.801645 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 03 09:16:19 crc kubenswrapper[4576]: I1203 09:16:19.802783 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 03 09:16:19 crc kubenswrapper[4576]: I1203 09:16:19.810460 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack/nova-api-0" Dec 03 09:16:19 crc kubenswrapper[4576]: I1203 09:16:19.843800 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8z8l\" (UniqueName: \"kubernetes.io/projected/c0de047e-34a4-4a3e-aa97-d39bd43517e2-kube-api-access-z8z8l\") pod \"certified-operators-qn27t\" (UID: \"c0de047e-34a4-4a3e-aa97-d39bd43517e2\") " pod="openshift-marketplace/certified-operators-qn27t" Dec 03 09:16:19 crc kubenswrapper[4576]: I1203 09:16:19.843929 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0de047e-34a4-4a3e-aa97-d39bd43517e2-catalog-content\") pod \"certified-operators-qn27t\" (UID: \"c0de047e-34a4-4a3e-aa97-d39bd43517e2\") " pod="openshift-marketplace/certified-operators-qn27t" Dec 03 09:16:19 crc kubenswrapper[4576]: I1203 09:16:19.844025 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0de047e-34a4-4a3e-aa97-d39bd43517e2-utilities\") pod \"certified-operators-qn27t\" (UID: \"c0de047e-34a4-4a3e-aa97-d39bd43517e2\") " pod="openshift-marketplace/certified-operators-qn27t" Dec 03 09:16:19 crc kubenswrapper[4576]: I1203 09:16:19.844446 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0de047e-34a4-4a3e-aa97-d39bd43517e2-utilities\") pod \"certified-operators-qn27t\" (UID: \"c0de047e-34a4-4a3e-aa97-d39bd43517e2\") " pod="openshift-marketplace/certified-operators-qn27t" Dec 03 09:16:19 crc kubenswrapper[4576]: I1203 09:16:19.845325 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0de047e-34a4-4a3e-aa97-d39bd43517e2-catalog-content\") pod \"certified-operators-qn27t\" (UID: \"c0de047e-34a4-4a3e-aa97-d39bd43517e2\") " pod="openshift-marketplace/certified-operators-qn27t" Dec 03 09:16:19 crc kubenswrapper[4576]: I1203 09:16:19.867293 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8z8l\" (UniqueName: \"kubernetes.io/projected/c0de047e-34a4-4a3e-aa97-d39bd43517e2-kube-api-access-z8z8l\") pod \"certified-operators-qn27t\" (UID: \"c0de047e-34a4-4a3e-aa97-d39bd43517e2\") " pod="openshift-marketplace/certified-operators-qn27t" Dec 03 09:16:19 crc kubenswrapper[4576]: I1203 09:16:19.942719 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qn27t" Dec 03 09:16:20 crc kubenswrapper[4576]: I1203 09:16:20.438386 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 03 09:16:20 crc kubenswrapper[4576]: I1203 09:16:20.446261 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 03 09:16:20 crc kubenswrapper[4576]: W1203 09:16:20.614149 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc0de047e_34a4_4a3e_aa97_d39bd43517e2.slice/crio-0e763e5a1d0372f2b35b9f30703c768c1d3785b4bc8169c31022aad0b037389d WatchSource:0}: Error finding container 0e763e5a1d0372f2b35b9f30703c768c1d3785b4bc8169c31022aad0b037389d: Status 404 returned error can't find the container with id 0e763e5a1d0372f2b35b9f30703c768c1d3785b4bc8169c31022aad0b037389d Dec 03 09:16:20 crc kubenswrapper[4576]: I1203 09:16:20.616483 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qn27t"] Dec 03 09:16:21 crc kubenswrapper[4576]: I1203 09:16:21.449715 4576 generic.go:334] "Generic (PLEG): container finished" podID="c0de047e-34a4-4a3e-aa97-d39bd43517e2" containerID="6e4a309e2b90c11166d45fa071a2829b56e311f218b608d46d5c47e973d7e960" exitCode=0 Dec 03 09:16:21 crc kubenswrapper[4576]: I1203 09:16:21.449823 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qn27t" event={"ID":"c0de047e-34a4-4a3e-aa97-d39bd43517e2","Type":"ContainerDied","Data":"6e4a309e2b90c11166d45fa071a2829b56e311f218b608d46d5c47e973d7e960"} Dec 03 09:16:21 crc kubenswrapper[4576]: I1203 09:16:21.450067 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qn27t" event={"ID":"c0de047e-34a4-4a3e-aa97-d39bd43517e2","Type":"ContainerStarted","Data":"0e763e5a1d0372f2b35b9f30703c768c1d3785b4bc8169c31022aad0b037389d"} Dec 03 09:16:22 crc kubenswrapper[4576]: I1203 09:16:22.462626 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qn27t" event={"ID":"c0de047e-34a4-4a3e-aa97-d39bd43517e2","Type":"ContainerStarted","Data":"3c54d205a7212e0268128b9ee63449e9572224c285b57582352f8a0c3be860fc"} Dec 03 09:16:22 crc kubenswrapper[4576]: I1203 09:16:22.589313 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 03 09:16:22 crc kubenswrapper[4576]: I1203 09:16:22.611187 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 03 09:16:22 crc kubenswrapper[4576]: I1203 09:16:22.616960 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 03 09:16:23 crc kubenswrapper[4576]: I1203 09:16:23.479031 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 03 09:16:24 crc kubenswrapper[4576]: I1203 09:16:24.819387 4576 generic.go:334] "Generic (PLEG): container finished" podID="c0de047e-34a4-4a3e-aa97-d39bd43517e2" containerID="3c54d205a7212e0268128b9ee63449e9572224c285b57582352f8a0c3be860fc" exitCode=0 Dec 03 09:16:24 crc kubenswrapper[4576]: I1203 09:16:24.820601 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qn27t" 
event={"ID":"c0de047e-34a4-4a3e-aa97-d39bd43517e2","Type":"ContainerDied","Data":"3c54d205a7212e0268128b9ee63449e9572224c285b57582352f8a0c3be860fc"} Dec 03 09:16:25 crc kubenswrapper[4576]: I1203 09:16:25.830369 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qn27t" event={"ID":"c0de047e-34a4-4a3e-aa97-d39bd43517e2","Type":"ContainerStarted","Data":"3912f1fb11cb947a84fb9b0174d1e69ab196104665c71236666095bc27716144"} Dec 03 09:16:25 crc kubenswrapper[4576]: I1203 09:16:25.863670 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-qn27t" podStartSLOduration=2.958564468 podStartE2EDuration="6.863648116s" podCreationTimestamp="2025-12-03 09:16:19 +0000 UTC" firstStartedPulling="2025-12-03 09:16:21.451483679 +0000 UTC m=+2188.837460663" lastFinishedPulling="2025-12-03 09:16:25.356567307 +0000 UTC m=+2192.742544311" observedRunningTime="2025-12-03 09:16:25.855082322 +0000 UTC m=+2193.241059306" watchObservedRunningTime="2025-12-03 09:16:25.863648116 +0000 UTC m=+2193.249625100" Dec 03 09:16:26 crc kubenswrapper[4576]: I1203 09:16:26.981960 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-sr54c"] Dec 03 09:16:26 crc kubenswrapper[4576]: I1203 09:16:26.984172 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sr54c" Dec 03 09:16:27 crc kubenswrapper[4576]: I1203 09:16:27.001023 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-sr54c"] Dec 03 09:16:27 crc kubenswrapper[4576]: I1203 09:16:27.062313 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5347943b-6090-45db-8f41-4a56a81c9784-catalog-content\") pod \"redhat-marketplace-sr54c\" (UID: \"5347943b-6090-45db-8f41-4a56a81c9784\") " pod="openshift-marketplace/redhat-marketplace-sr54c" Dec 03 09:16:27 crc kubenswrapper[4576]: I1203 09:16:27.062364 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5l2vq\" (UniqueName: \"kubernetes.io/projected/5347943b-6090-45db-8f41-4a56a81c9784-kube-api-access-5l2vq\") pod \"redhat-marketplace-sr54c\" (UID: \"5347943b-6090-45db-8f41-4a56a81c9784\") " pod="openshift-marketplace/redhat-marketplace-sr54c" Dec 03 09:16:27 crc kubenswrapper[4576]: I1203 09:16:27.062396 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5347943b-6090-45db-8f41-4a56a81c9784-utilities\") pod \"redhat-marketplace-sr54c\" (UID: \"5347943b-6090-45db-8f41-4a56a81c9784\") " pod="openshift-marketplace/redhat-marketplace-sr54c" Dec 03 09:16:27 crc kubenswrapper[4576]: I1203 09:16:27.165015 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5l2vq\" (UniqueName: \"kubernetes.io/projected/5347943b-6090-45db-8f41-4a56a81c9784-kube-api-access-5l2vq\") pod \"redhat-marketplace-sr54c\" (UID: \"5347943b-6090-45db-8f41-4a56a81c9784\") " pod="openshift-marketplace/redhat-marketplace-sr54c" Dec 03 09:16:27 crc kubenswrapper[4576]: I1203 09:16:27.165123 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5347943b-6090-45db-8f41-4a56a81c9784-utilities\") pod 
\"redhat-marketplace-sr54c\" (UID: \"5347943b-6090-45db-8f41-4a56a81c9784\") " pod="openshift-marketplace/redhat-marketplace-sr54c" Dec 03 09:16:27 crc kubenswrapper[4576]: I1203 09:16:27.165294 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5347943b-6090-45db-8f41-4a56a81c9784-catalog-content\") pod \"redhat-marketplace-sr54c\" (UID: \"5347943b-6090-45db-8f41-4a56a81c9784\") " pod="openshift-marketplace/redhat-marketplace-sr54c" Dec 03 09:16:27 crc kubenswrapper[4576]: I1203 09:16:27.165765 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5347943b-6090-45db-8f41-4a56a81c9784-catalog-content\") pod \"redhat-marketplace-sr54c\" (UID: \"5347943b-6090-45db-8f41-4a56a81c9784\") " pod="openshift-marketplace/redhat-marketplace-sr54c" Dec 03 09:16:27 crc kubenswrapper[4576]: I1203 09:16:27.165807 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5347943b-6090-45db-8f41-4a56a81c9784-utilities\") pod \"redhat-marketplace-sr54c\" (UID: \"5347943b-6090-45db-8f41-4a56a81c9784\") " pod="openshift-marketplace/redhat-marketplace-sr54c" Dec 03 09:16:27 crc kubenswrapper[4576]: I1203 09:16:27.206332 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5l2vq\" (UniqueName: \"kubernetes.io/projected/5347943b-6090-45db-8f41-4a56a81c9784-kube-api-access-5l2vq\") pod \"redhat-marketplace-sr54c\" (UID: \"5347943b-6090-45db-8f41-4a56a81c9784\") " pod="openshift-marketplace/redhat-marketplace-sr54c" Dec 03 09:16:27 crc kubenswrapper[4576]: I1203 09:16:27.327120 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sr54c" Dec 03 09:16:27 crc kubenswrapper[4576]: W1203 09:16:27.961021 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5347943b_6090_45db_8f41_4a56a81c9784.slice/crio-c813e0b9566cc74c476b00868cbf201d06dc324f4c38db03d54d3f7c2f5b19de WatchSource:0}: Error finding container c813e0b9566cc74c476b00868cbf201d06dc324f4c38db03d54d3f7c2f5b19de: Status 404 returned error can't find the container with id c813e0b9566cc74c476b00868cbf201d06dc324f4c38db03d54d3f7c2f5b19de Dec 03 09:16:27 crc kubenswrapper[4576]: I1203 09:16:27.977545 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-sr54c"] Dec 03 09:16:28 crc kubenswrapper[4576]: I1203 09:16:28.869426 4576 generic.go:334] "Generic (PLEG): container finished" podID="5347943b-6090-45db-8f41-4a56a81c9784" containerID="9d6ef2bdc75ba10a83d59ea6594623cb8cf6175c78b16166dfb2d8fc7725839b" exitCode=0 Dec 03 09:16:28 crc kubenswrapper[4576]: I1203 09:16:28.869571 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sr54c" event={"ID":"5347943b-6090-45db-8f41-4a56a81c9784","Type":"ContainerDied","Data":"9d6ef2bdc75ba10a83d59ea6594623cb8cf6175c78b16166dfb2d8fc7725839b"} Dec 03 09:16:28 crc kubenswrapper[4576]: I1203 09:16:28.871028 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sr54c" event={"ID":"5347943b-6090-45db-8f41-4a56a81c9784","Type":"ContainerStarted","Data":"c813e0b9566cc74c476b00868cbf201d06dc324f4c38db03d54d3f7c2f5b19de"} Dec 03 09:16:29 crc kubenswrapper[4576]: I1203 09:16:29.392853 4576 scope.go:117] "RemoveContainer" containerID="12842b7504adc88b5a3d8946287877b7b97f0713d634f10410c796f4537e041e" Dec 03 09:16:29 crc kubenswrapper[4576]: I1203 09:16:29.447226 4576 scope.go:117] "RemoveContainer" containerID="7fbb43d7ccd2e9d609a194a1f4f262ea418cb904d1ed59b11ea9b9ffaba89bc3" Dec 03 09:16:29 crc kubenswrapper[4576]: I1203 09:16:29.882178 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sr54c" event={"ID":"5347943b-6090-45db-8f41-4a56a81c9784","Type":"ContainerStarted","Data":"28fce0eec09bb4b6c05270f4b704c414b9d10a1a555c56909e32fa5e7ccc2463"} Dec 03 09:16:29 crc kubenswrapper[4576]: I1203 09:16:29.943146 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-qn27t" Dec 03 09:16:29 crc kubenswrapper[4576]: I1203 09:16:29.943808 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-qn27t" Dec 03 09:16:30 crc kubenswrapper[4576]: I1203 09:16:30.891240 4576 generic.go:334] "Generic (PLEG): container finished" podID="5347943b-6090-45db-8f41-4a56a81c9784" containerID="28fce0eec09bb4b6c05270f4b704c414b9d10a1a555c56909e32fa5e7ccc2463" exitCode=0 Dec 03 09:16:30 crc kubenswrapper[4576]: I1203 09:16:30.891354 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sr54c" event={"ID":"5347943b-6090-45db-8f41-4a56a81c9784","Type":"ContainerDied","Data":"28fce0eec09bb4b6c05270f4b704c414b9d10a1a555c56909e32fa5e7ccc2463"} Dec 03 09:16:30 crc kubenswrapper[4576]: I1203 09:16:30.989841 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-qn27t" podUID="c0de047e-34a4-4a3e-aa97-d39bd43517e2" 
containerName="registry-server" probeResult="failure" output=< Dec 03 09:16:30 crc kubenswrapper[4576]: timeout: failed to connect service ":50051" within 1s Dec 03 09:16:30 crc kubenswrapper[4576]: > Dec 03 09:16:32 crc kubenswrapper[4576]: I1203 09:16:32.564412 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 09:16:32 crc kubenswrapper[4576]: I1203 09:16:32.918713 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sr54c" event={"ID":"5347943b-6090-45db-8f41-4a56a81c9784","Type":"ContainerStarted","Data":"38df8cca0933a426621f599b70b07a737d5c4f2cb74fe3df71688577e6e0953c"} Dec 03 09:16:32 crc kubenswrapper[4576]: I1203 09:16:32.973323 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-sr54c" podStartSLOduration=3.863426059 podStartE2EDuration="6.973306324s" podCreationTimestamp="2025-12-03 09:16:26 +0000 UTC" firstStartedPulling="2025-12-03 09:16:28.874077367 +0000 UTC m=+2196.260054351" lastFinishedPulling="2025-12-03 09:16:31.983957622 +0000 UTC m=+2199.369934616" observedRunningTime="2025-12-03 09:16:32.961047099 +0000 UTC m=+2200.347024083" watchObservedRunningTime="2025-12-03 09:16:32.973306324 +0000 UTC m=+2200.359283298" Dec 03 09:16:33 crc kubenswrapper[4576]: I1203 09:16:33.613465 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 09:16:37 crc kubenswrapper[4576]: I1203 09:16:37.328721 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-sr54c" Dec 03 09:16:37 crc kubenswrapper[4576]: I1203 09:16:37.329248 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-sr54c" Dec 03 09:16:37 crc kubenswrapper[4576]: I1203 09:16:37.377187 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-sr54c" Dec 03 09:16:38 crc kubenswrapper[4576]: I1203 09:16:38.045981 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-sr54c" Dec 03 09:16:38 crc kubenswrapper[4576]: I1203 09:16:38.111573 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-sr54c"] Dec 03 09:16:38 crc kubenswrapper[4576]: I1203 09:16:38.261660 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="6d6451b7-d2df-487c-afa3-3f1e56758ce4" containerName="rabbitmq" containerID="cri-o://a722bfc3fa26a7f568a1e342ec5b26478d7491b99e396ab25fbe73bd5d94d346" gracePeriod=604796 Dec 03 09:16:38 crc kubenswrapper[4576]: I1203 09:16:38.595716 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="c20e6200-091a-47c3-afef-d1b4d9538309" containerName="rabbitmq" containerID="cri-o://cc9f6bef3fd0728bfe32fd0ac02369d31893e294e46b78cad2f1ed3e07c33ce5" gracePeriod=604794 Dec 03 09:16:39 crc kubenswrapper[4576]: I1203 09:16:39.991836 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-qn27t" Dec 03 09:16:40 crc kubenswrapper[4576]: I1203 09:16:40.009221 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-sr54c" podUID="5347943b-6090-45db-8f41-4a56a81c9784" 
containerName="registry-server" containerID="cri-o://38df8cca0933a426621f599b70b07a737d5c4f2cb74fe3df71688577e6e0953c" gracePeriod=2 Dec 03 09:16:40 crc kubenswrapper[4576]: I1203 09:16:40.060483 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-qn27t" Dec 03 09:16:40 crc kubenswrapper[4576]: I1203 09:16:40.178493 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="c20e6200-091a-47c3-afef-d1b4d9538309" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.98:5671: connect: connection refused" Dec 03 09:16:40 crc kubenswrapper[4576]: I1203 09:16:40.582870 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="6d6451b7-d2df-487c-afa3-3f1e56758ce4" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.99:5671: connect: connection refused" Dec 03 09:16:40 crc kubenswrapper[4576]: I1203 09:16:40.634722 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sr54c" Dec 03 09:16:40 crc kubenswrapper[4576]: I1203 09:16:40.827317 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5347943b-6090-45db-8f41-4a56a81c9784-catalog-content\") pod \"5347943b-6090-45db-8f41-4a56a81c9784\" (UID: \"5347943b-6090-45db-8f41-4a56a81c9784\") " Dec 03 09:16:40 crc kubenswrapper[4576]: I1203 09:16:40.827372 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5l2vq\" (UniqueName: \"kubernetes.io/projected/5347943b-6090-45db-8f41-4a56a81c9784-kube-api-access-5l2vq\") pod \"5347943b-6090-45db-8f41-4a56a81c9784\" (UID: \"5347943b-6090-45db-8f41-4a56a81c9784\") " Dec 03 09:16:40 crc kubenswrapper[4576]: I1203 09:16:40.827557 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5347943b-6090-45db-8f41-4a56a81c9784-utilities\") pod \"5347943b-6090-45db-8f41-4a56a81c9784\" (UID: \"5347943b-6090-45db-8f41-4a56a81c9784\") " Dec 03 09:16:40 crc kubenswrapper[4576]: I1203 09:16:40.828420 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5347943b-6090-45db-8f41-4a56a81c9784-utilities" (OuterVolumeSpecName: "utilities") pod "5347943b-6090-45db-8f41-4a56a81c9784" (UID: "5347943b-6090-45db-8f41-4a56a81c9784"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:16:40 crc kubenswrapper[4576]: I1203 09:16:40.836742 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5347943b-6090-45db-8f41-4a56a81c9784-kube-api-access-5l2vq" (OuterVolumeSpecName: "kube-api-access-5l2vq") pod "5347943b-6090-45db-8f41-4a56a81c9784" (UID: "5347943b-6090-45db-8f41-4a56a81c9784"). InnerVolumeSpecName "kube-api-access-5l2vq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:16:40 crc kubenswrapper[4576]: I1203 09:16:40.896048 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5347943b-6090-45db-8f41-4a56a81c9784-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5347943b-6090-45db-8f41-4a56a81c9784" (UID: "5347943b-6090-45db-8f41-4a56a81c9784"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:16:40 crc kubenswrapper[4576]: I1203 09:16:40.929585 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5347943b-6090-45db-8f41-4a56a81c9784-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 09:16:40 crc kubenswrapper[4576]: I1203 09:16:40.929624 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5347943b-6090-45db-8f41-4a56a81c9784-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 09:16:40 crc kubenswrapper[4576]: I1203 09:16:40.929636 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5l2vq\" (UniqueName: \"kubernetes.io/projected/5347943b-6090-45db-8f41-4a56a81c9784-kube-api-access-5l2vq\") on node \"crc\" DevicePath \"\"" Dec 03 09:16:41 crc kubenswrapper[4576]: I1203 09:16:41.018918 4576 generic.go:334] "Generic (PLEG): container finished" podID="5347943b-6090-45db-8f41-4a56a81c9784" containerID="38df8cca0933a426621f599b70b07a737d5c4f2cb74fe3df71688577e6e0953c" exitCode=0 Dec 03 09:16:41 crc kubenswrapper[4576]: I1203 09:16:41.018970 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sr54c" event={"ID":"5347943b-6090-45db-8f41-4a56a81c9784","Type":"ContainerDied","Data":"38df8cca0933a426621f599b70b07a737d5c4f2cb74fe3df71688577e6e0953c"} Dec 03 09:16:41 crc kubenswrapper[4576]: I1203 09:16:41.018999 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sr54c" event={"ID":"5347943b-6090-45db-8f41-4a56a81c9784","Type":"ContainerDied","Data":"c813e0b9566cc74c476b00868cbf201d06dc324f4c38db03d54d3f7c2f5b19de"} Dec 03 09:16:41 crc kubenswrapper[4576]: I1203 09:16:41.019016 4576 scope.go:117] "RemoveContainer" containerID="38df8cca0933a426621f599b70b07a737d5c4f2cb74fe3df71688577e6e0953c" Dec 03 09:16:41 crc kubenswrapper[4576]: I1203 09:16:41.019175 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sr54c" Dec 03 09:16:41 crc kubenswrapper[4576]: I1203 09:16:41.043283 4576 scope.go:117] "RemoveContainer" containerID="28fce0eec09bb4b6c05270f4b704c414b9d10a1a555c56909e32fa5e7ccc2463" Dec 03 09:16:41 crc kubenswrapper[4576]: I1203 09:16:41.073589 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-sr54c"] Dec 03 09:16:41 crc kubenswrapper[4576]: I1203 09:16:41.075326 4576 scope.go:117] "RemoveContainer" containerID="9d6ef2bdc75ba10a83d59ea6594623cb8cf6175c78b16166dfb2d8fc7725839b" Dec 03 09:16:41 crc kubenswrapper[4576]: I1203 09:16:41.081268 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-sr54c"] Dec 03 09:16:41 crc kubenswrapper[4576]: I1203 09:16:41.119614 4576 scope.go:117] "RemoveContainer" containerID="38df8cca0933a426621f599b70b07a737d5c4f2cb74fe3df71688577e6e0953c" Dec 03 09:16:41 crc kubenswrapper[4576]: E1203 09:16:41.120082 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"38df8cca0933a426621f599b70b07a737d5c4f2cb74fe3df71688577e6e0953c\": container with ID starting with 38df8cca0933a426621f599b70b07a737d5c4f2cb74fe3df71688577e6e0953c not found: ID does not exist" containerID="38df8cca0933a426621f599b70b07a737d5c4f2cb74fe3df71688577e6e0953c" Dec 03 09:16:41 crc kubenswrapper[4576]: I1203 09:16:41.120183 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38df8cca0933a426621f599b70b07a737d5c4f2cb74fe3df71688577e6e0953c"} err="failed to get container status \"38df8cca0933a426621f599b70b07a737d5c4f2cb74fe3df71688577e6e0953c\": rpc error: code = NotFound desc = could not find container \"38df8cca0933a426621f599b70b07a737d5c4f2cb74fe3df71688577e6e0953c\": container with ID starting with 38df8cca0933a426621f599b70b07a737d5c4f2cb74fe3df71688577e6e0953c not found: ID does not exist" Dec 03 09:16:41 crc kubenswrapper[4576]: I1203 09:16:41.120261 4576 scope.go:117] "RemoveContainer" containerID="28fce0eec09bb4b6c05270f4b704c414b9d10a1a555c56909e32fa5e7ccc2463" Dec 03 09:16:41 crc kubenswrapper[4576]: E1203 09:16:41.120688 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28fce0eec09bb4b6c05270f4b704c414b9d10a1a555c56909e32fa5e7ccc2463\": container with ID starting with 28fce0eec09bb4b6c05270f4b704c414b9d10a1a555c56909e32fa5e7ccc2463 not found: ID does not exist" containerID="28fce0eec09bb4b6c05270f4b704c414b9d10a1a555c56909e32fa5e7ccc2463" Dec 03 09:16:41 crc kubenswrapper[4576]: I1203 09:16:41.120761 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28fce0eec09bb4b6c05270f4b704c414b9d10a1a555c56909e32fa5e7ccc2463"} err="failed to get container status \"28fce0eec09bb4b6c05270f4b704c414b9d10a1a555c56909e32fa5e7ccc2463\": rpc error: code = NotFound desc = could not find container \"28fce0eec09bb4b6c05270f4b704c414b9d10a1a555c56909e32fa5e7ccc2463\": container with ID starting with 28fce0eec09bb4b6c05270f4b704c414b9d10a1a555c56909e32fa5e7ccc2463 not found: ID does not exist" Dec 03 09:16:41 crc kubenswrapper[4576]: I1203 09:16:41.120822 4576 scope.go:117] "RemoveContainer" containerID="9d6ef2bdc75ba10a83d59ea6594623cb8cf6175c78b16166dfb2d8fc7725839b" Dec 03 09:16:41 crc kubenswrapper[4576]: E1203 09:16:41.121100 4576 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"9d6ef2bdc75ba10a83d59ea6594623cb8cf6175c78b16166dfb2d8fc7725839b\": container with ID starting with 9d6ef2bdc75ba10a83d59ea6594623cb8cf6175c78b16166dfb2d8fc7725839b not found: ID does not exist" containerID="9d6ef2bdc75ba10a83d59ea6594623cb8cf6175c78b16166dfb2d8fc7725839b" Dec 03 09:16:41 crc kubenswrapper[4576]: I1203 09:16:41.121179 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d6ef2bdc75ba10a83d59ea6594623cb8cf6175c78b16166dfb2d8fc7725839b"} err="failed to get container status \"9d6ef2bdc75ba10a83d59ea6594623cb8cf6175c78b16166dfb2d8fc7725839b\": rpc error: code = NotFound desc = could not find container \"9d6ef2bdc75ba10a83d59ea6594623cb8cf6175c78b16166dfb2d8fc7725839b\": container with ID starting with 9d6ef2bdc75ba10a83d59ea6594623cb8cf6175c78b16166dfb2d8fc7725839b not found: ID does not exist" Dec 03 09:16:41 crc kubenswrapper[4576]: I1203 09:16:41.226825 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qn27t"] Dec 03 09:16:41 crc kubenswrapper[4576]: I1203 09:16:41.227031 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-qn27t" podUID="c0de047e-34a4-4a3e-aa97-d39bd43517e2" containerName="registry-server" containerID="cri-o://3912f1fb11cb947a84fb9b0174d1e69ab196104665c71236666095bc27716144" gracePeriod=2 Dec 03 09:16:41 crc kubenswrapper[4576]: I1203 09:16:41.687485 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5347943b-6090-45db-8f41-4a56a81c9784" path="/var/lib/kubelet/pods/5347943b-6090-45db-8f41-4a56a81c9784/volumes" Dec 03 09:16:41 crc kubenswrapper[4576]: I1203 09:16:41.693346 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qn27t" Dec 03 09:16:41 crc kubenswrapper[4576]: I1203 09:16:41.846744 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0de047e-34a4-4a3e-aa97-d39bd43517e2-catalog-content\") pod \"c0de047e-34a4-4a3e-aa97-d39bd43517e2\" (UID: \"c0de047e-34a4-4a3e-aa97-d39bd43517e2\") " Dec 03 09:16:41 crc kubenswrapper[4576]: I1203 09:16:41.846956 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z8z8l\" (UniqueName: \"kubernetes.io/projected/c0de047e-34a4-4a3e-aa97-d39bd43517e2-kube-api-access-z8z8l\") pod \"c0de047e-34a4-4a3e-aa97-d39bd43517e2\" (UID: \"c0de047e-34a4-4a3e-aa97-d39bd43517e2\") " Dec 03 09:16:41 crc kubenswrapper[4576]: I1203 09:16:41.847036 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0de047e-34a4-4a3e-aa97-d39bd43517e2-utilities\") pod \"c0de047e-34a4-4a3e-aa97-d39bd43517e2\" (UID: \"c0de047e-34a4-4a3e-aa97-d39bd43517e2\") " Dec 03 09:16:41 crc kubenswrapper[4576]: I1203 09:16:41.848685 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0de047e-34a4-4a3e-aa97-d39bd43517e2-utilities" (OuterVolumeSpecName: "utilities") pod "c0de047e-34a4-4a3e-aa97-d39bd43517e2" (UID: "c0de047e-34a4-4a3e-aa97-d39bd43517e2"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:16:41 crc kubenswrapper[4576]: I1203 09:16:41.855374 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0de047e-34a4-4a3e-aa97-d39bd43517e2-kube-api-access-z8z8l" (OuterVolumeSpecName: "kube-api-access-z8z8l") pod "c0de047e-34a4-4a3e-aa97-d39bd43517e2" (UID: "c0de047e-34a4-4a3e-aa97-d39bd43517e2"). InnerVolumeSpecName "kube-api-access-z8z8l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:16:41 crc kubenswrapper[4576]: I1203 09:16:41.912077 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0de047e-34a4-4a3e-aa97-d39bd43517e2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c0de047e-34a4-4a3e-aa97-d39bd43517e2" (UID: "c0de047e-34a4-4a3e-aa97-d39bd43517e2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:16:41 crc kubenswrapper[4576]: I1203 09:16:41.949774 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0de047e-34a4-4a3e-aa97-d39bd43517e2-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 09:16:41 crc kubenswrapper[4576]: I1203 09:16:41.949811 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z8z8l\" (UniqueName: \"kubernetes.io/projected/c0de047e-34a4-4a3e-aa97-d39bd43517e2-kube-api-access-z8z8l\") on node \"crc\" DevicePath \"\"" Dec 03 09:16:41 crc kubenswrapper[4576]: I1203 09:16:41.949823 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0de047e-34a4-4a3e-aa97-d39bd43517e2-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 09:16:42 crc kubenswrapper[4576]: I1203 09:16:42.033742 4576 generic.go:334] "Generic (PLEG): container finished" podID="c0de047e-34a4-4a3e-aa97-d39bd43517e2" containerID="3912f1fb11cb947a84fb9b0174d1e69ab196104665c71236666095bc27716144" exitCode=0 Dec 03 09:16:42 crc kubenswrapper[4576]: I1203 09:16:42.033808 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qn27t" Dec 03 09:16:42 crc kubenswrapper[4576]: I1203 09:16:42.033826 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qn27t" event={"ID":"c0de047e-34a4-4a3e-aa97-d39bd43517e2","Type":"ContainerDied","Data":"3912f1fb11cb947a84fb9b0174d1e69ab196104665c71236666095bc27716144"} Dec 03 09:16:42 crc kubenswrapper[4576]: I1203 09:16:42.034213 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qn27t" event={"ID":"c0de047e-34a4-4a3e-aa97-d39bd43517e2","Type":"ContainerDied","Data":"0e763e5a1d0372f2b35b9f30703c768c1d3785b4bc8169c31022aad0b037389d"} Dec 03 09:16:42 crc kubenswrapper[4576]: I1203 09:16:42.034233 4576 scope.go:117] "RemoveContainer" containerID="3912f1fb11cb947a84fb9b0174d1e69ab196104665c71236666095bc27716144" Dec 03 09:16:42 crc kubenswrapper[4576]: I1203 09:16:42.080974 4576 scope.go:117] "RemoveContainer" containerID="3c54d205a7212e0268128b9ee63449e9572224c285b57582352f8a0c3be860fc" Dec 03 09:16:42 crc kubenswrapper[4576]: I1203 09:16:42.081726 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qn27t"] Dec 03 09:16:42 crc kubenswrapper[4576]: I1203 09:16:42.091307 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-qn27t"] Dec 03 09:16:42 crc kubenswrapper[4576]: I1203 09:16:42.102329 4576 scope.go:117] "RemoveContainer" containerID="6e4a309e2b90c11166d45fa071a2829b56e311f218b608d46d5c47e973d7e960" Dec 03 09:16:42 crc kubenswrapper[4576]: I1203 09:16:42.122241 4576 scope.go:117] "RemoveContainer" containerID="3912f1fb11cb947a84fb9b0174d1e69ab196104665c71236666095bc27716144" Dec 03 09:16:42 crc kubenswrapper[4576]: E1203 09:16:42.122800 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3912f1fb11cb947a84fb9b0174d1e69ab196104665c71236666095bc27716144\": container with ID starting with 3912f1fb11cb947a84fb9b0174d1e69ab196104665c71236666095bc27716144 not found: ID does not exist" containerID="3912f1fb11cb947a84fb9b0174d1e69ab196104665c71236666095bc27716144" Dec 03 09:16:42 crc kubenswrapper[4576]: I1203 09:16:42.122832 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3912f1fb11cb947a84fb9b0174d1e69ab196104665c71236666095bc27716144"} err="failed to get container status \"3912f1fb11cb947a84fb9b0174d1e69ab196104665c71236666095bc27716144\": rpc error: code = NotFound desc = could not find container \"3912f1fb11cb947a84fb9b0174d1e69ab196104665c71236666095bc27716144\": container with ID starting with 3912f1fb11cb947a84fb9b0174d1e69ab196104665c71236666095bc27716144 not found: ID does not exist" Dec 03 09:16:42 crc kubenswrapper[4576]: I1203 09:16:42.122854 4576 scope.go:117] "RemoveContainer" containerID="3c54d205a7212e0268128b9ee63449e9572224c285b57582352f8a0c3be860fc" Dec 03 09:16:42 crc kubenswrapper[4576]: E1203 09:16:42.123492 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3c54d205a7212e0268128b9ee63449e9572224c285b57582352f8a0c3be860fc\": container with ID starting with 3c54d205a7212e0268128b9ee63449e9572224c285b57582352f8a0c3be860fc not found: ID does not exist" containerID="3c54d205a7212e0268128b9ee63449e9572224c285b57582352f8a0c3be860fc" Dec 03 09:16:42 crc kubenswrapper[4576]: I1203 09:16:42.123632 4576 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c54d205a7212e0268128b9ee63449e9572224c285b57582352f8a0c3be860fc"} err="failed to get container status \"3c54d205a7212e0268128b9ee63449e9572224c285b57582352f8a0c3be860fc\": rpc error: code = NotFound desc = could not find container \"3c54d205a7212e0268128b9ee63449e9572224c285b57582352f8a0c3be860fc\": container with ID starting with 3c54d205a7212e0268128b9ee63449e9572224c285b57582352f8a0c3be860fc not found: ID does not exist" Dec 03 09:16:42 crc kubenswrapper[4576]: I1203 09:16:42.123656 4576 scope.go:117] "RemoveContainer" containerID="6e4a309e2b90c11166d45fa071a2829b56e311f218b608d46d5c47e973d7e960" Dec 03 09:16:42 crc kubenswrapper[4576]: E1203 09:16:42.123899 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e4a309e2b90c11166d45fa071a2829b56e311f218b608d46d5c47e973d7e960\": container with ID starting with 6e4a309e2b90c11166d45fa071a2829b56e311f218b608d46d5c47e973d7e960 not found: ID does not exist" containerID="6e4a309e2b90c11166d45fa071a2829b56e311f218b608d46d5c47e973d7e960" Dec 03 09:16:42 crc kubenswrapper[4576]: I1203 09:16:42.123920 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e4a309e2b90c11166d45fa071a2829b56e311f218b608d46d5c47e973d7e960"} err="failed to get container status \"6e4a309e2b90c11166d45fa071a2829b56e311f218b608d46d5c47e973d7e960\": rpc error: code = NotFound desc = could not find container \"6e4a309e2b90c11166d45fa071a2829b56e311f218b608d46d5c47e973d7e960\": container with ID starting with 6e4a309e2b90c11166d45fa071a2829b56e311f218b608d46d5c47e973d7e960 not found: ID does not exist" Dec 03 09:16:43 crc kubenswrapper[4576]: I1203 09:16:43.689181 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0de047e-34a4-4a3e-aa97-d39bd43517e2" path="/var/lib/kubelet/pods/c0de047e-34a4-4a3e-aa97-d39bd43517e2/volumes" Dec 03 09:16:44 crc kubenswrapper[4576]: I1203 09:16:44.911873 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.023360 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6d6451b7-d2df-487c-afa3-3f1e56758ce4-pod-info\") pod \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.023680 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6d6451b7-d2df-487c-afa3-3f1e56758ce4-rabbitmq-erlang-cookie\") pod \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.023811 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6d6451b7-d2df-487c-afa3-3f1e56758ce4-server-conf\") pod \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.023958 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-spswx\" (UniqueName: \"kubernetes.io/projected/6d6451b7-d2df-487c-afa3-3f1e56758ce4-kube-api-access-spswx\") pod \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.024056 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.024167 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6d6451b7-d2df-487c-afa3-3f1e56758ce4-config-data\") pod \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.024302 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6d6451b7-d2df-487c-afa3-3f1e56758ce4-rabbitmq-confd\") pod \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.024398 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6d6451b7-d2df-487c-afa3-3f1e56758ce4-rabbitmq-tls\") pod \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.024506 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6d6451b7-d2df-487c-afa3-3f1e56758ce4-plugins-conf\") pod \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.024609 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6d6451b7-d2df-487c-afa3-3f1e56758ce4-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "6d6451b7-d2df-487c-afa3-3f1e56758ce4" (UID: 
"6d6451b7-d2df-487c-afa3-3f1e56758ce4"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.024738 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6d6451b7-d2df-487c-afa3-3f1e56758ce4-erlang-cookie-secret\") pod \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.024884 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6d6451b7-d2df-487c-afa3-3f1e56758ce4-rabbitmq-plugins\") pod \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\" (UID: \"6d6451b7-d2df-487c-afa3-3f1e56758ce4\") " Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.026024 4576 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6d6451b7-d2df-487c-afa3-3f1e56758ce4-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.029165 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6d6451b7-d2df-487c-afa3-3f1e56758ce4-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "6d6451b7-d2df-487c-afa3-3f1e56758ce4" (UID: "6d6451b7-d2df-487c-afa3-3f1e56758ce4"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.033708 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/6d6451b7-d2df-487c-afa3-3f1e56758ce4-pod-info" (OuterVolumeSpecName: "pod-info") pod "6d6451b7-d2df-487c-afa3-3f1e56758ce4" (UID: "6d6451b7-d2df-487c-afa3-3f1e56758ce4"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.034137 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6d6451b7-d2df-487c-afa3-3f1e56758ce4-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "6d6451b7-d2df-487c-afa3-3f1e56758ce4" (UID: "6d6451b7-d2df-487c-afa3-3f1e56758ce4"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.037830 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d6451b7-d2df-487c-afa3-3f1e56758ce4-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "6d6451b7-d2df-487c-afa3-3f1e56758ce4" (UID: "6d6451b7-d2df-487c-afa3-3f1e56758ce4"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.038437 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "persistence") pod "6d6451b7-d2df-487c-afa3-3f1e56758ce4" (UID: "6d6451b7-d2df-487c-afa3-3f1e56758ce4"). InnerVolumeSpecName "local-storage05-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.043684 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d6451b7-d2df-487c-afa3-3f1e56758ce4-kube-api-access-spswx" (OuterVolumeSpecName: "kube-api-access-spswx") pod "6d6451b7-d2df-487c-afa3-3f1e56758ce4" (UID: "6d6451b7-d2df-487c-afa3-3f1e56758ce4"). InnerVolumeSpecName "kube-api-access-spswx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.067886 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d6451b7-d2df-487c-afa3-3f1e56758ce4-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "6d6451b7-d2df-487c-afa3-3f1e56758ce4" (UID: "6d6451b7-d2df-487c-afa3-3f1e56758ce4"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.127601 4576 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6d6451b7-d2df-487c-afa3-3f1e56758ce4-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.127634 4576 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6d6451b7-d2df-487c-afa3-3f1e56758ce4-pod-info\") on node \"crc\" DevicePath \"\"" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.127646 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-spswx\" (UniqueName: \"kubernetes.io/projected/6d6451b7-d2df-487c-afa3-3f1e56758ce4-kube-api-access-spswx\") on node \"crc\" DevicePath \"\"" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.127681 4576 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.127693 4576 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6d6451b7-d2df-487c-afa3-3f1e56758ce4-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.127705 4576 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6d6451b7-d2df-487c-afa3-3f1e56758ce4-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.127717 4576 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6d6451b7-d2df-487c-afa3-3f1e56758ce4-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.184958 4576 generic.go:334] "Generic (PLEG): container finished" podID="6d6451b7-d2df-487c-afa3-3f1e56758ce4" containerID="a722bfc3fa26a7f568a1e342ec5b26478d7491b99e396ab25fbe73bd5d94d346" exitCode=0 Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.185032 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"6d6451b7-d2df-487c-afa3-3f1e56758ce4","Type":"ContainerDied","Data":"a722bfc3fa26a7f568a1e342ec5b26478d7491b99e396ab25fbe73bd5d94d346"} Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.185058 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" 
event={"ID":"6d6451b7-d2df-487c-afa3-3f1e56758ce4","Type":"ContainerDied","Data":"966bfff0933c521ea1d2bb5cc20a33f34753c788f9f0cbd3dec797aefe8b27c6"} Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.185076 4576 scope.go:117] "RemoveContainer" containerID="a722bfc3fa26a7f568a1e342ec5b26478d7491b99e396ab25fbe73bd5d94d346" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.185235 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.191442 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6d6451b7-d2df-487c-afa3-3f1e56758ce4-server-conf" (OuterVolumeSpecName: "server-conf") pod "6d6451b7-d2df-487c-afa3-3f1e56758ce4" (UID: "6d6451b7-d2df-487c-afa3-3f1e56758ce4"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.194411 4576 generic.go:334] "Generic (PLEG): container finished" podID="c20e6200-091a-47c3-afef-d1b4d9538309" containerID="cc9f6bef3fd0728bfe32fd0ac02369d31893e294e46b78cad2f1ed3e07c33ce5" exitCode=0 Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.194503 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c20e6200-091a-47c3-afef-d1b4d9538309","Type":"ContainerDied","Data":"cc9f6bef3fd0728bfe32fd0ac02369d31893e294e46b78cad2f1ed3e07c33ce5"} Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.212042 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6d6451b7-d2df-487c-afa3-3f1e56758ce4-config-data" (OuterVolumeSpecName: "config-data") pod "6d6451b7-d2df-487c-afa3-3f1e56758ce4" (UID: "6d6451b7-d2df-487c-afa3-3f1e56758ce4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.230789 4576 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6d6451b7-d2df-487c-afa3-3f1e56758ce4-server-conf\") on node \"crc\" DevicePath \"\"" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.230828 4576 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6d6451b7-d2df-487c-afa3-3f1e56758ce4-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.252202 4576 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.281063 4576 scope.go:117] "RemoveContainer" containerID="20c7cdb1b03745675354ae5189269b69daad4f5c2b1e25e038f123970827d1b8" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.333914 4576 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.349075 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d6451b7-d2df-487c-afa3-3f1e56758ce4-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "6d6451b7-d2df-487c-afa3-3f1e56758ce4" (UID: "6d6451b7-d2df-487c-afa3-3f1e56758ce4"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.389295 4576 scope.go:117] "RemoveContainer" containerID="a722bfc3fa26a7f568a1e342ec5b26478d7491b99e396ab25fbe73bd5d94d346" Dec 03 09:16:45 crc kubenswrapper[4576]: E1203 09:16:45.395691 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a722bfc3fa26a7f568a1e342ec5b26478d7491b99e396ab25fbe73bd5d94d346\": container with ID starting with a722bfc3fa26a7f568a1e342ec5b26478d7491b99e396ab25fbe73bd5d94d346 not found: ID does not exist" containerID="a722bfc3fa26a7f568a1e342ec5b26478d7491b99e396ab25fbe73bd5d94d346" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.395736 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a722bfc3fa26a7f568a1e342ec5b26478d7491b99e396ab25fbe73bd5d94d346"} err="failed to get container status \"a722bfc3fa26a7f568a1e342ec5b26478d7491b99e396ab25fbe73bd5d94d346\": rpc error: code = NotFound desc = could not find container \"a722bfc3fa26a7f568a1e342ec5b26478d7491b99e396ab25fbe73bd5d94d346\": container with ID starting with a722bfc3fa26a7f568a1e342ec5b26478d7491b99e396ab25fbe73bd5d94d346 not found: ID does not exist" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.395762 4576 scope.go:117] "RemoveContainer" containerID="20c7cdb1b03745675354ae5189269b69daad4f5c2b1e25e038f123970827d1b8" Dec 03 09:16:45 crc kubenswrapper[4576]: E1203 09:16:45.396350 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"20c7cdb1b03745675354ae5189269b69daad4f5c2b1e25e038f123970827d1b8\": container with ID starting with 20c7cdb1b03745675354ae5189269b69daad4f5c2b1e25e038f123970827d1b8 not found: ID does not exist" containerID="20c7cdb1b03745675354ae5189269b69daad4f5c2b1e25e038f123970827d1b8" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.396403 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"20c7cdb1b03745675354ae5189269b69daad4f5c2b1e25e038f123970827d1b8"} err="failed to get container status \"20c7cdb1b03745675354ae5189269b69daad4f5c2b1e25e038f123970827d1b8\": rpc error: code = NotFound desc = could not find container \"20c7cdb1b03745675354ae5189269b69daad4f5c2b1e25e038f123970827d1b8\": container with ID starting with 20c7cdb1b03745675354ae5189269b69daad4f5c2b1e25e038f123970827d1b8 not found: ID does not exist" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.438697 4576 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6d6451b7-d2df-487c-afa3-3f1e56758ce4-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.447307 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.543748 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c20e6200-091a-47c3-afef-d1b4d9538309-server-conf\") pod \"c20e6200-091a-47c3-afef-d1b4d9538309\" (UID: \"c20e6200-091a-47c3-afef-d1b4d9538309\") " Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.543833 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c20e6200-091a-47c3-afef-d1b4d9538309-pod-info\") pod \"c20e6200-091a-47c3-afef-d1b4d9538309\" (UID: \"c20e6200-091a-47c3-afef-d1b4d9538309\") " Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.543858 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c20e6200-091a-47c3-afef-d1b4d9538309-plugins-conf\") pod \"c20e6200-091a-47c3-afef-d1b4d9538309\" (UID: \"c20e6200-091a-47c3-afef-d1b4d9538309\") " Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.543888 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c20e6200-091a-47c3-afef-d1b4d9538309-rabbitmq-plugins\") pod \"c20e6200-091a-47c3-afef-d1b4d9538309\" (UID: \"c20e6200-091a-47c3-afef-d1b4d9538309\") " Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.543916 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c20e6200-091a-47c3-afef-d1b4d9538309-rabbitmq-confd\") pod \"c20e6200-091a-47c3-afef-d1b4d9538309\" (UID: \"c20e6200-091a-47c3-afef-d1b4d9538309\") " Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.543936 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-thkt6\" (UniqueName: \"kubernetes.io/projected/c20e6200-091a-47c3-afef-d1b4d9538309-kube-api-access-thkt6\") pod \"c20e6200-091a-47c3-afef-d1b4d9538309\" (UID: \"c20e6200-091a-47c3-afef-d1b4d9538309\") " Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.543962 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c20e6200-091a-47c3-afef-d1b4d9538309-erlang-cookie-secret\") pod \"c20e6200-091a-47c3-afef-d1b4d9538309\" (UID: \"c20e6200-091a-47c3-afef-d1b4d9538309\") " Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.543997 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c20e6200-091a-47c3-afef-d1b4d9538309-rabbitmq-erlang-cookie\") pod \"c20e6200-091a-47c3-afef-d1b4d9538309\" (UID: \"c20e6200-091a-47c3-afef-d1b4d9538309\") " Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.544060 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"c20e6200-091a-47c3-afef-d1b4d9538309\" (UID: \"c20e6200-091a-47c3-afef-d1b4d9538309\") " Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.544119 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c20e6200-091a-47c3-afef-d1b4d9538309-config-data\") pod \"c20e6200-091a-47c3-afef-d1b4d9538309\" (UID: 
\"c20e6200-091a-47c3-afef-d1b4d9538309\") " Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.544164 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c20e6200-091a-47c3-afef-d1b4d9538309-rabbitmq-tls\") pod \"c20e6200-091a-47c3-afef-d1b4d9538309\" (UID: \"c20e6200-091a-47c3-afef-d1b4d9538309\") " Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.544565 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c20e6200-091a-47c3-afef-d1b4d9538309-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "c20e6200-091a-47c3-afef-d1b4d9538309" (UID: "c20e6200-091a-47c3-afef-d1b4d9538309"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.560353 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c20e6200-091a-47c3-afef-d1b4d9538309-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "c20e6200-091a-47c3-afef-d1b4d9538309" (UID: "c20e6200-091a-47c3-afef-d1b4d9538309"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.561560 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c20e6200-091a-47c3-afef-d1b4d9538309-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "c20e6200-091a-47c3-afef-d1b4d9538309" (UID: "c20e6200-091a-47c3-afef-d1b4d9538309"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.582988 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "persistence") pod "c20e6200-091a-47c3-afef-d1b4d9538309" (UID: "c20e6200-091a-47c3-afef-d1b4d9538309"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.583413 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/c20e6200-091a-47c3-afef-d1b4d9538309-pod-info" (OuterVolumeSpecName: "pod-info") pod "c20e6200-091a-47c3-afef-d1b4d9538309" (UID: "c20e6200-091a-47c3-afef-d1b4d9538309"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.584009 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c20e6200-091a-47c3-afef-d1b4d9538309-kube-api-access-thkt6" (OuterVolumeSpecName: "kube-api-access-thkt6") pod "c20e6200-091a-47c3-afef-d1b4d9538309" (UID: "c20e6200-091a-47c3-afef-d1b4d9538309"). InnerVolumeSpecName "kube-api-access-thkt6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.586739 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c20e6200-091a-47c3-afef-d1b4d9538309-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "c20e6200-091a-47c3-afef-d1b4d9538309" (UID: "c20e6200-091a-47c3-afef-d1b4d9538309"). InnerVolumeSpecName "erlang-cookie-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.635604 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c20e6200-091a-47c3-afef-d1b4d9538309-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "c20e6200-091a-47c3-afef-d1b4d9538309" (UID: "c20e6200-091a-47c3-afef-d1b4d9538309"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.646230 4576 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c20e6200-091a-47c3-afef-d1b4d9538309-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.646264 4576 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c20e6200-091a-47c3-afef-d1b4d9538309-pod-info\") on node \"crc\" DevicePath \"\"" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.646273 4576 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c20e6200-091a-47c3-afef-d1b4d9538309-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.646281 4576 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c20e6200-091a-47c3-afef-d1b4d9538309-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.646289 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-thkt6\" (UniqueName: \"kubernetes.io/projected/c20e6200-091a-47c3-afef-d1b4d9538309-kube-api-access-thkt6\") on node \"crc\" DevicePath \"\"" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.646299 4576 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c20e6200-091a-47c3-afef-d1b4d9538309-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.646308 4576 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c20e6200-091a-47c3-afef-d1b4d9538309-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.646328 4576 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.659634 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.672165 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c20e6200-091a-47c3-afef-d1b4d9538309-config-data" (OuterVolumeSpecName: "config-data") pod "c20e6200-091a-47c3-afef-d1b4d9538309" (UID: "c20e6200-091a-47c3-afef-d1b4d9538309"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.719961 4576 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.734747 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.754920 4576 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.754943 4576 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c20e6200-091a-47c3-afef-d1b4d9538309-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.767935 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c20e6200-091a-47c3-afef-d1b4d9538309-server-conf" (OuterVolumeSpecName: "server-conf") pod "c20e6200-091a-47c3-afef-d1b4d9538309" (UID: "c20e6200-091a-47c3-afef-d1b4d9538309"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.783352 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 09:16:45 crc kubenswrapper[4576]: E1203 09:16:45.783796 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d6451b7-d2df-487c-afa3-3f1e56758ce4" containerName="rabbitmq" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.783814 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d6451b7-d2df-487c-afa3-3f1e56758ce4" containerName="rabbitmq" Dec 03 09:16:45 crc kubenswrapper[4576]: E1203 09:16:45.783829 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c20e6200-091a-47c3-afef-d1b4d9538309" containerName="setup-container" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.783837 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="c20e6200-091a-47c3-afef-d1b4d9538309" containerName="setup-container" Dec 03 09:16:45 crc kubenswrapper[4576]: E1203 09:16:45.783859 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0de047e-34a4-4a3e-aa97-d39bd43517e2" containerName="extract-content" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.783865 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0de047e-34a4-4a3e-aa97-d39bd43517e2" containerName="extract-content" Dec 03 09:16:45 crc kubenswrapper[4576]: E1203 09:16:45.783878 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5347943b-6090-45db-8f41-4a56a81c9784" containerName="registry-server" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.783884 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="5347943b-6090-45db-8f41-4a56a81c9784" containerName="registry-server" Dec 03 09:16:45 crc kubenswrapper[4576]: E1203 09:16:45.783909 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d6451b7-d2df-487c-afa3-3f1e56758ce4" containerName="setup-container" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.783915 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d6451b7-d2df-487c-afa3-3f1e56758ce4" containerName="setup-container" Dec 03 09:16:45 crc kubenswrapper[4576]: 
E1203 09:16:45.783923 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c20e6200-091a-47c3-afef-d1b4d9538309" containerName="rabbitmq" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.783929 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="c20e6200-091a-47c3-afef-d1b4d9538309" containerName="rabbitmq" Dec 03 09:16:45 crc kubenswrapper[4576]: E1203 09:16:45.783938 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0de047e-34a4-4a3e-aa97-d39bd43517e2" containerName="extract-utilities" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.783944 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0de047e-34a4-4a3e-aa97-d39bd43517e2" containerName="extract-utilities" Dec 03 09:16:45 crc kubenswrapper[4576]: E1203 09:16:45.783960 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5347943b-6090-45db-8f41-4a56a81c9784" containerName="extract-utilities" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.783965 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="5347943b-6090-45db-8f41-4a56a81c9784" containerName="extract-utilities" Dec 03 09:16:45 crc kubenswrapper[4576]: E1203 09:16:45.783980 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0de047e-34a4-4a3e-aa97-d39bd43517e2" containerName="registry-server" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.783985 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0de047e-34a4-4a3e-aa97-d39bd43517e2" containerName="registry-server" Dec 03 09:16:45 crc kubenswrapper[4576]: E1203 09:16:45.783994 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5347943b-6090-45db-8f41-4a56a81c9784" containerName="extract-content" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.783999 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="5347943b-6090-45db-8f41-4a56a81c9784" containerName="extract-content" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.784164 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="5347943b-6090-45db-8f41-4a56a81c9784" containerName="registry-server" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.784182 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="c20e6200-091a-47c3-afef-d1b4d9538309" containerName="rabbitmq" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.784194 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0de047e-34a4-4a3e-aa97-d39bd43517e2" containerName="registry-server" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.784208 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d6451b7-d2df-487c-afa3-3f1e56758ce4" containerName="rabbitmq" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.785172 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.801236 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.801263 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.801353 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.801410 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.801353 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.801483 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-x8bsx" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.801540 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.801720 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.856330 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d0c9f8c4-e8c5-4033-ac50-305178e9010f-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0c9f8c4-e8c5-4033-ac50-305178e9010f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.856395 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d0c9f8c4-e8c5-4033-ac50-305178e9010f-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0c9f8c4-e8c5-4033-ac50-305178e9010f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.856451 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d0c9f8c4-e8c5-4033-ac50-305178e9010f-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0c9f8c4-e8c5-4033-ac50-305178e9010f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.856495 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0c9f8c4-e8c5-4033-ac50-305178e9010f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.856584 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d0c9f8c4-e8c5-4033-ac50-305178e9010f-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0c9f8c4-e8c5-4033-ac50-305178e9010f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.856614 4576 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d0c9f8c4-e8c5-4033-ac50-305178e9010f-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0c9f8c4-e8c5-4033-ac50-305178e9010f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.856664 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d0c9f8c4-e8c5-4033-ac50-305178e9010f-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0c9f8c4-e8c5-4033-ac50-305178e9010f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.856691 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d0c9f8c4-e8c5-4033-ac50-305178e9010f-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0c9f8c4-e8c5-4033-ac50-305178e9010f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.856747 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhlxh\" (UniqueName: \"kubernetes.io/projected/d0c9f8c4-e8c5-4033-ac50-305178e9010f-kube-api-access-jhlxh\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0c9f8c4-e8c5-4033-ac50-305178e9010f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.856771 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d0c9f8c4-e8c5-4033-ac50-305178e9010f-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0c9f8c4-e8c5-4033-ac50-305178e9010f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.856798 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d0c9f8c4-e8c5-4033-ac50-305178e9010f-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0c9f8c4-e8c5-4033-ac50-305178e9010f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.856866 4576 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c20e6200-091a-47c3-afef-d1b4d9538309-server-conf\") on node \"crc\" DevicePath \"\"" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.898782 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c20e6200-091a-47c3-afef-d1b4d9538309-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "c20e6200-091a-47c3-afef-d1b4d9538309" (UID: "c20e6200-091a-47c3-afef-d1b4d9538309"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.958503 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d0c9f8c4-e8c5-4033-ac50-305178e9010f-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0c9f8c4-e8c5-4033-ac50-305178e9010f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.958611 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d0c9f8c4-e8c5-4033-ac50-305178e9010f-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0c9f8c4-e8c5-4033-ac50-305178e9010f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.958663 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d0c9f8c4-e8c5-4033-ac50-305178e9010f-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0c9f8c4-e8c5-4033-ac50-305178e9010f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.958713 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0c9f8c4-e8c5-4033-ac50-305178e9010f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.958783 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d0c9f8c4-e8c5-4033-ac50-305178e9010f-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0c9f8c4-e8c5-4033-ac50-305178e9010f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.958809 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d0c9f8c4-e8c5-4033-ac50-305178e9010f-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0c9f8c4-e8c5-4033-ac50-305178e9010f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.958860 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d0c9f8c4-e8c5-4033-ac50-305178e9010f-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0c9f8c4-e8c5-4033-ac50-305178e9010f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.958906 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d0c9f8c4-e8c5-4033-ac50-305178e9010f-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0c9f8c4-e8c5-4033-ac50-305178e9010f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.958964 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jhlxh\" (UniqueName: \"kubernetes.io/projected/d0c9f8c4-e8c5-4033-ac50-305178e9010f-kube-api-access-jhlxh\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0c9f8c4-e8c5-4033-ac50-305178e9010f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.958987 4576 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d0c9f8c4-e8c5-4033-ac50-305178e9010f-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0c9f8c4-e8c5-4033-ac50-305178e9010f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.959013 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d0c9f8c4-e8c5-4033-ac50-305178e9010f-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0c9f8c4-e8c5-4033-ac50-305178e9010f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.959077 4576 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c20e6200-091a-47c3-afef-d1b4d9538309-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.959878 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d0c9f8c4-e8c5-4033-ac50-305178e9010f-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0c9f8c4-e8c5-4033-ac50-305178e9010f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.960487 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d0c9f8c4-e8c5-4033-ac50-305178e9010f-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0c9f8c4-e8c5-4033-ac50-305178e9010f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.960835 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d0c9f8c4-e8c5-4033-ac50-305178e9010f-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0c9f8c4-e8c5-4033-ac50-305178e9010f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.961647 4576 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0c9f8c4-e8c5-4033-ac50-305178e9010f\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.962372 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d0c9f8c4-e8c5-4033-ac50-305178e9010f-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0c9f8c4-e8c5-4033-ac50-305178e9010f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.962915 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d0c9f8c4-e8c5-4033-ac50-305178e9010f-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0c9f8c4-e8c5-4033-ac50-305178e9010f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.965456 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d0c9f8c4-e8c5-4033-ac50-305178e9010f-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0c9f8c4-e8c5-4033-ac50-305178e9010f\") " pod="openstack/rabbitmq-cell1-server-0" 
Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.965710 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d0c9f8c4-e8c5-4033-ac50-305178e9010f-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0c9f8c4-e8c5-4033-ac50-305178e9010f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.969163 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d0c9f8c4-e8c5-4033-ac50-305178e9010f-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0c9f8c4-e8c5-4033-ac50-305178e9010f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.969625 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d0c9f8c4-e8c5-4033-ac50-305178e9010f-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0c9f8c4-e8c5-4033-ac50-305178e9010f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.978311 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jhlxh\" (UniqueName: \"kubernetes.io/projected/d0c9f8c4-e8c5-4033-ac50-305178e9010f-kube-api-access-jhlxh\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0c9f8c4-e8c5-4033-ac50-305178e9010f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:16:45 crc kubenswrapper[4576]: I1203 09:16:45.998619 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0c9f8c4-e8c5-4033-ac50-305178e9010f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.128189 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.209093 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c20e6200-091a-47c3-afef-d1b4d9538309","Type":"ContainerDied","Data":"297f604f3bb58820b9e9771e53c01494b83023596d6972b274bbe8c6fc988040"} Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.209483 4576 scope.go:117] "RemoveContainer" containerID="cc9f6bef3fd0728bfe32fd0ac02369d31893e294e46b78cad2f1ed3e07c33ce5" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.209239 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.261993 4576 scope.go:117] "RemoveContainer" containerID="35f9490152a4739453e519a118067b6d218f1ac074563732fa6b9864d8353092" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.262726 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.277308 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.293653 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.295800 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.305797 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.306025 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.306166 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-knqw9" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.307031 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.307212 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.307343 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.307459 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.351585 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.467157 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9-config-data\") pod \"rabbitmq-server-0\" (UID: \"fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9\") " pod="openstack/rabbitmq-server-0" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.467202 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9-pod-info\") pod \"rabbitmq-server-0\" (UID: \"fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9\") " pod="openstack/rabbitmq-server-0" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.467220 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9-server-conf\") pod \"rabbitmq-server-0\" (UID: \"fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9\") " pod="openstack/rabbitmq-server-0" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.467289 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9\") " pod="openstack/rabbitmq-server-0" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.467314 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9\") " pod="openstack/rabbitmq-server-0" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.474391 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: 
\"kubernetes.io/configmap/fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9\") " pod="openstack/rabbitmq-server-0" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.474504 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9\") " pod="openstack/rabbitmq-server-0" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.474685 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vk5mf\" (UniqueName: \"kubernetes.io/projected/fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9-kube-api-access-vk5mf\") pod \"rabbitmq-server-0\" (UID: \"fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9\") " pod="openstack/rabbitmq-server-0" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.474717 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9\") " pod="openstack/rabbitmq-server-0" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.474754 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9\") " pod="openstack/rabbitmq-server-0" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.474879 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9\") " pod="openstack/rabbitmq-server-0" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.576331 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9\") " pod="openstack/rabbitmq-server-0" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.576396 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9\") " pod="openstack/rabbitmq-server-0" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.576438 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9\") " pod="openstack/rabbitmq-server-0" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.576472 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: 
\"fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9\") " pod="openstack/rabbitmq-server-0" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.576521 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vk5mf\" (UniqueName: \"kubernetes.io/projected/fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9-kube-api-access-vk5mf\") pod \"rabbitmq-server-0\" (UID: \"fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9\") " pod="openstack/rabbitmq-server-0" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.576567 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9\") " pod="openstack/rabbitmq-server-0" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.576612 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9\") " pod="openstack/rabbitmq-server-0" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.576681 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9\") " pod="openstack/rabbitmq-server-0" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.576725 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9-config-data\") pod \"rabbitmq-server-0\" (UID: \"fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9\") " pod="openstack/rabbitmq-server-0" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.576757 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9-pod-info\") pod \"rabbitmq-server-0\" (UID: \"fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9\") " pod="openstack/rabbitmq-server-0" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.576778 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9-server-conf\") pod \"rabbitmq-server-0\" (UID: \"fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9\") " pod="openstack/rabbitmq-server-0" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.577514 4576 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/rabbitmq-server-0" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.577974 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9\") " pod="openstack/rabbitmq-server-0" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.578082 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: 
\"kubernetes.io/configmap/fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9\") " pod="openstack/rabbitmq-server-0" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.578087 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9-config-data\") pod \"rabbitmq-server-0\" (UID: \"fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9\") " pod="openstack/rabbitmq-server-0" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.578228 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9\") " pod="openstack/rabbitmq-server-0" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.579915 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9-server-conf\") pod \"rabbitmq-server-0\" (UID: \"fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9\") " pod="openstack/rabbitmq-server-0" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.582404 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9-pod-info\") pod \"rabbitmq-server-0\" (UID: \"fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9\") " pod="openstack/rabbitmq-server-0" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.583637 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9\") " pod="openstack/rabbitmq-server-0" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.605084 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9\") " pod="openstack/rabbitmq-server-0" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.605834 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9\") " pod="openstack/rabbitmq-server-0" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.609788 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vk5mf\" (UniqueName: \"kubernetes.io/projected/fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9-kube-api-access-vk5mf\") pod \"rabbitmq-server-0\" (UID: \"fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9\") " pod="openstack/rabbitmq-server-0" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.625974 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9\") " pod="openstack/rabbitmq-server-0" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.661093 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 03 09:16:46 crc kubenswrapper[4576]: I1203 09:16:46.757600 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 09:16:47 crc kubenswrapper[4576]: I1203 09:16:47.044354 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 09:16:47 crc kubenswrapper[4576]: I1203 09:16:47.126793 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-d558885bc-8k79z"] Dec 03 09:16:47 crc kubenswrapper[4576]: I1203 09:16:47.133742 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d558885bc-8k79z" Dec 03 09:16:47 crc kubenswrapper[4576]: I1203 09:16:47.135744 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Dec 03 09:16:47 crc kubenswrapper[4576]: I1203 09:16:47.149540 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-d558885bc-8k79z"] Dec 03 09:16:47 crc kubenswrapper[4576]: I1203 09:16:47.264035 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d0c9f8c4-e8c5-4033-ac50-305178e9010f","Type":"ContainerStarted","Data":"7c772fd45c18728531d06d009d02ab8aaca6fd266db88ec2cc246debad075e61"} Dec 03 09:16:47 crc kubenswrapper[4576]: I1203 09:16:47.265718 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9","Type":"ContainerStarted","Data":"86bcc1ec49a5346235d4fc68aadc3659e60f1bdc55e8f2d2a7a46f29a720dfb5"} Dec 03 09:16:47 crc kubenswrapper[4576]: I1203 09:16:47.290433 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4d60da8a-0bd3-449c-a111-150182756bc6-dns-svc\") pod \"dnsmasq-dns-d558885bc-8k79z\" (UID: \"4d60da8a-0bd3-449c-a111-150182756bc6\") " pod="openstack/dnsmasq-dns-d558885bc-8k79z" Dec 03 09:16:47 crc kubenswrapper[4576]: I1203 09:16:47.290494 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4d60da8a-0bd3-449c-a111-150182756bc6-config\") pod \"dnsmasq-dns-d558885bc-8k79z\" (UID: \"4d60da8a-0bd3-449c-a111-150182756bc6\") " pod="openstack/dnsmasq-dns-d558885bc-8k79z" Dec 03 09:16:47 crc kubenswrapper[4576]: I1203 09:16:47.290557 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4d60da8a-0bd3-449c-a111-150182756bc6-ovsdbserver-sb\") pod \"dnsmasq-dns-d558885bc-8k79z\" (UID: \"4d60da8a-0bd3-449c-a111-150182756bc6\") " pod="openstack/dnsmasq-dns-d558885bc-8k79z" Dec 03 09:16:47 crc kubenswrapper[4576]: I1203 09:16:47.290586 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4d60da8a-0bd3-449c-a111-150182756bc6-dns-swift-storage-0\") pod \"dnsmasq-dns-d558885bc-8k79z\" (UID: \"4d60da8a-0bd3-449c-a111-150182756bc6\") " pod="openstack/dnsmasq-dns-d558885bc-8k79z" Dec 03 09:16:47 crc kubenswrapper[4576]: I1203 09:16:47.290686 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4d60da8a-0bd3-449c-a111-150182756bc6-ovsdbserver-nb\") pod 
\"dnsmasq-dns-d558885bc-8k79z\" (UID: \"4d60da8a-0bd3-449c-a111-150182756bc6\") " pod="openstack/dnsmasq-dns-d558885bc-8k79z" Dec 03 09:16:47 crc kubenswrapper[4576]: I1203 09:16:47.290771 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/4d60da8a-0bd3-449c-a111-150182756bc6-openstack-edpm-ipam\") pod \"dnsmasq-dns-d558885bc-8k79z\" (UID: \"4d60da8a-0bd3-449c-a111-150182756bc6\") " pod="openstack/dnsmasq-dns-d558885bc-8k79z" Dec 03 09:16:47 crc kubenswrapper[4576]: I1203 09:16:47.290789 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rx8kp\" (UniqueName: \"kubernetes.io/projected/4d60da8a-0bd3-449c-a111-150182756bc6-kube-api-access-rx8kp\") pod \"dnsmasq-dns-d558885bc-8k79z\" (UID: \"4d60da8a-0bd3-449c-a111-150182756bc6\") " pod="openstack/dnsmasq-dns-d558885bc-8k79z" Dec 03 09:16:47 crc kubenswrapper[4576]: I1203 09:16:47.393007 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4d60da8a-0bd3-449c-a111-150182756bc6-config\") pod \"dnsmasq-dns-d558885bc-8k79z\" (UID: \"4d60da8a-0bd3-449c-a111-150182756bc6\") " pod="openstack/dnsmasq-dns-d558885bc-8k79z" Dec 03 09:16:47 crc kubenswrapper[4576]: I1203 09:16:47.393309 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4d60da8a-0bd3-449c-a111-150182756bc6-ovsdbserver-sb\") pod \"dnsmasq-dns-d558885bc-8k79z\" (UID: \"4d60da8a-0bd3-449c-a111-150182756bc6\") " pod="openstack/dnsmasq-dns-d558885bc-8k79z" Dec 03 09:16:47 crc kubenswrapper[4576]: I1203 09:16:47.393455 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4d60da8a-0bd3-449c-a111-150182756bc6-dns-swift-storage-0\") pod \"dnsmasq-dns-d558885bc-8k79z\" (UID: \"4d60da8a-0bd3-449c-a111-150182756bc6\") " pod="openstack/dnsmasq-dns-d558885bc-8k79z" Dec 03 09:16:47 crc kubenswrapper[4576]: I1203 09:16:47.393602 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4d60da8a-0bd3-449c-a111-150182756bc6-ovsdbserver-nb\") pod \"dnsmasq-dns-d558885bc-8k79z\" (UID: \"4d60da8a-0bd3-449c-a111-150182756bc6\") " pod="openstack/dnsmasq-dns-d558885bc-8k79z" Dec 03 09:16:47 crc kubenswrapper[4576]: I1203 09:16:47.393797 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/4d60da8a-0bd3-449c-a111-150182756bc6-openstack-edpm-ipam\") pod \"dnsmasq-dns-d558885bc-8k79z\" (UID: \"4d60da8a-0bd3-449c-a111-150182756bc6\") " pod="openstack/dnsmasq-dns-d558885bc-8k79z" Dec 03 09:16:47 crc kubenswrapper[4576]: I1203 09:16:47.393903 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rx8kp\" (UniqueName: \"kubernetes.io/projected/4d60da8a-0bd3-449c-a111-150182756bc6-kube-api-access-rx8kp\") pod \"dnsmasq-dns-d558885bc-8k79z\" (UID: \"4d60da8a-0bd3-449c-a111-150182756bc6\") " pod="openstack/dnsmasq-dns-d558885bc-8k79z" Dec 03 09:16:47 crc kubenswrapper[4576]: I1203 09:16:47.394060 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4d60da8a-0bd3-449c-a111-150182756bc6-dns-svc\") pod 
\"dnsmasq-dns-d558885bc-8k79z\" (UID: \"4d60da8a-0bd3-449c-a111-150182756bc6\") " pod="openstack/dnsmasq-dns-d558885bc-8k79z" Dec 03 09:16:47 crc kubenswrapper[4576]: I1203 09:16:47.394751 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/4d60da8a-0bd3-449c-a111-150182756bc6-openstack-edpm-ipam\") pod \"dnsmasq-dns-d558885bc-8k79z\" (UID: \"4d60da8a-0bd3-449c-a111-150182756bc6\") " pod="openstack/dnsmasq-dns-d558885bc-8k79z" Dec 03 09:16:47 crc kubenswrapper[4576]: I1203 09:16:47.394892 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4d60da8a-0bd3-449c-a111-150182756bc6-ovsdbserver-sb\") pod \"dnsmasq-dns-d558885bc-8k79z\" (UID: \"4d60da8a-0bd3-449c-a111-150182756bc6\") " pod="openstack/dnsmasq-dns-d558885bc-8k79z" Dec 03 09:16:47 crc kubenswrapper[4576]: I1203 09:16:47.394906 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4d60da8a-0bd3-449c-a111-150182756bc6-config\") pod \"dnsmasq-dns-d558885bc-8k79z\" (UID: \"4d60da8a-0bd3-449c-a111-150182756bc6\") " pod="openstack/dnsmasq-dns-d558885bc-8k79z" Dec 03 09:16:47 crc kubenswrapper[4576]: I1203 09:16:47.395069 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4d60da8a-0bd3-449c-a111-150182756bc6-ovsdbserver-nb\") pod \"dnsmasq-dns-d558885bc-8k79z\" (UID: \"4d60da8a-0bd3-449c-a111-150182756bc6\") " pod="openstack/dnsmasq-dns-d558885bc-8k79z" Dec 03 09:16:47 crc kubenswrapper[4576]: I1203 09:16:47.395237 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4d60da8a-0bd3-449c-a111-150182756bc6-dns-svc\") pod \"dnsmasq-dns-d558885bc-8k79z\" (UID: \"4d60da8a-0bd3-449c-a111-150182756bc6\") " pod="openstack/dnsmasq-dns-d558885bc-8k79z" Dec 03 09:16:47 crc kubenswrapper[4576]: I1203 09:16:47.395306 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4d60da8a-0bd3-449c-a111-150182756bc6-dns-swift-storage-0\") pod \"dnsmasq-dns-d558885bc-8k79z\" (UID: \"4d60da8a-0bd3-449c-a111-150182756bc6\") " pod="openstack/dnsmasq-dns-d558885bc-8k79z" Dec 03 09:16:47 crc kubenswrapper[4576]: I1203 09:16:47.410767 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rx8kp\" (UniqueName: \"kubernetes.io/projected/4d60da8a-0bd3-449c-a111-150182756bc6-kube-api-access-rx8kp\") pod \"dnsmasq-dns-d558885bc-8k79z\" (UID: \"4d60da8a-0bd3-449c-a111-150182756bc6\") " pod="openstack/dnsmasq-dns-d558885bc-8k79z" Dec 03 09:16:47 crc kubenswrapper[4576]: I1203 09:16:47.448731 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-d558885bc-8k79z" Dec 03 09:16:47 crc kubenswrapper[4576]: I1203 09:16:47.705082 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6d6451b7-d2df-487c-afa3-3f1e56758ce4" path="/var/lib/kubelet/pods/6d6451b7-d2df-487c-afa3-3f1e56758ce4/volumes" Dec 03 09:16:47 crc kubenswrapper[4576]: I1203 09:16:47.706683 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c20e6200-091a-47c3-afef-d1b4d9538309" path="/var/lib/kubelet/pods/c20e6200-091a-47c3-afef-d1b4d9538309/volumes" Dec 03 09:16:47 crc kubenswrapper[4576]: I1203 09:16:47.781155 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-d558885bc-8k79z"] Dec 03 09:16:47 crc kubenswrapper[4576]: W1203 09:16:47.782811 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4d60da8a_0bd3_449c_a111_150182756bc6.slice/crio-25f292b624309b0cc79186b60b10f4cb411e289e8af7310d7bf7015e0cab628a WatchSource:0}: Error finding container 25f292b624309b0cc79186b60b10f4cb411e289e8af7310d7bf7015e0cab628a: Status 404 returned error can't find the container with id 25f292b624309b0cc79186b60b10f4cb411e289e8af7310d7bf7015e0cab628a Dec 03 09:16:48 crc kubenswrapper[4576]: I1203 09:16:48.278056 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d558885bc-8k79z" event={"ID":"4d60da8a-0bd3-449c-a111-150182756bc6","Type":"ContainerStarted","Data":"731ceca16766cf59999f4c5c8d15e8d84fb4a951f376c6902970a613f948219d"} Dec 03 09:16:48 crc kubenswrapper[4576]: I1203 09:16:48.278300 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d558885bc-8k79z" event={"ID":"4d60da8a-0bd3-449c-a111-150182756bc6","Type":"ContainerStarted","Data":"25f292b624309b0cc79186b60b10f4cb411e289e8af7310d7bf7015e0cab628a"} Dec 03 09:16:49 crc kubenswrapper[4576]: I1203 09:16:49.289873 4576 generic.go:334] "Generic (PLEG): container finished" podID="4d60da8a-0bd3-449c-a111-150182756bc6" containerID="731ceca16766cf59999f4c5c8d15e8d84fb4a951f376c6902970a613f948219d" exitCode=0 Dec 03 09:16:49 crc kubenswrapper[4576]: I1203 09:16:49.289968 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d558885bc-8k79z" event={"ID":"4d60da8a-0bd3-449c-a111-150182756bc6","Type":"ContainerDied","Data":"731ceca16766cf59999f4c5c8d15e8d84fb4a951f376c6902970a613f948219d"} Dec 03 09:16:49 crc kubenswrapper[4576]: I1203 09:16:49.294666 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d0c9f8c4-e8c5-4033-ac50-305178e9010f","Type":"ContainerStarted","Data":"0fca0d995f334335608cd5d4335c4fad55e9813419cf0f7308992b45ec7fa0ff"} Dec 03 09:16:49 crc kubenswrapper[4576]: I1203 09:16:49.312130 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9","Type":"ContainerStarted","Data":"76c7a885f4a3a06bac229a230e47ae0919ce9e4b727545d517e7dab941aad422"} Dec 03 09:16:50 crc kubenswrapper[4576]: I1203 09:16:50.322408 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d558885bc-8k79z" event={"ID":"4d60da8a-0bd3-449c-a111-150182756bc6","Type":"ContainerStarted","Data":"e9c579a1bc9a19fdea89b72c154b492609181055870da45ba8a0c496116c447b"} Dec 03 09:16:50 crc kubenswrapper[4576]: I1203 09:16:50.344538 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/dnsmasq-dns-d558885bc-8k79z" podStartSLOduration=3.344502547 podStartE2EDuration="3.344502547s" podCreationTimestamp="2025-12-03 09:16:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:16:50.338647459 +0000 UTC m=+2217.724624443" watchObservedRunningTime="2025-12-03 09:16:50.344502547 +0000 UTC m=+2217.730479531" Dec 03 09:16:51 crc kubenswrapper[4576]: I1203 09:16:51.333608 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-d558885bc-8k79z" Dec 03 09:16:57 crc kubenswrapper[4576]: I1203 09:16:57.453130 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-d558885bc-8k79z" Dec 03 09:16:57 crc kubenswrapper[4576]: I1203 09:16:57.542916 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-zxdsl"] Dec 03 09:16:57 crc kubenswrapper[4576]: I1203 09:16:57.543172 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-cd5cbd7b9-zxdsl" podUID="031dd78c-4b11-401e-a442-f6824ded6e7d" containerName="dnsmasq-dns" containerID="cri-o://8f6969c402b9a2f68d4ea1b41fdfe670e48b0b8a30deabddff51eb816e63a355" gracePeriod=10 Dec 03 09:16:57 crc kubenswrapper[4576]: I1203 09:16:57.734320 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-67cb876dc9-z42kz"] Dec 03 09:16:57 crc kubenswrapper[4576]: I1203 09:16:57.744142 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67cb876dc9-z42kz" Dec 03 09:16:57 crc kubenswrapper[4576]: I1203 09:16:57.755842 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67cb876dc9-z42kz"] Dec 03 09:16:57 crc kubenswrapper[4576]: I1203 09:16:57.805512 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9e70ca50-713f-40fd-ac9a-89af89af62ba-dns-swift-storage-0\") pod \"dnsmasq-dns-67cb876dc9-z42kz\" (UID: \"9e70ca50-713f-40fd-ac9a-89af89af62ba\") " pod="openstack/dnsmasq-dns-67cb876dc9-z42kz" Dec 03 09:16:57 crc kubenswrapper[4576]: I1203 09:16:57.805594 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9e70ca50-713f-40fd-ac9a-89af89af62ba-dns-svc\") pod \"dnsmasq-dns-67cb876dc9-z42kz\" (UID: \"9e70ca50-713f-40fd-ac9a-89af89af62ba\") " pod="openstack/dnsmasq-dns-67cb876dc9-z42kz" Dec 03 09:16:57 crc kubenswrapper[4576]: I1203 09:16:57.805614 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vrplw\" (UniqueName: \"kubernetes.io/projected/9e70ca50-713f-40fd-ac9a-89af89af62ba-kube-api-access-vrplw\") pod \"dnsmasq-dns-67cb876dc9-z42kz\" (UID: \"9e70ca50-713f-40fd-ac9a-89af89af62ba\") " pod="openstack/dnsmasq-dns-67cb876dc9-z42kz" Dec 03 09:16:57 crc kubenswrapper[4576]: I1203 09:16:57.805653 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/9e70ca50-713f-40fd-ac9a-89af89af62ba-openstack-edpm-ipam\") pod \"dnsmasq-dns-67cb876dc9-z42kz\" (UID: \"9e70ca50-713f-40fd-ac9a-89af89af62ba\") " pod="openstack/dnsmasq-dns-67cb876dc9-z42kz" Dec 03 09:16:57 crc kubenswrapper[4576]: I1203 09:16:57.805709 4576 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9e70ca50-713f-40fd-ac9a-89af89af62ba-ovsdbserver-sb\") pod \"dnsmasq-dns-67cb876dc9-z42kz\" (UID: \"9e70ca50-713f-40fd-ac9a-89af89af62ba\") " pod="openstack/dnsmasq-dns-67cb876dc9-z42kz" Dec 03 09:16:57 crc kubenswrapper[4576]: I1203 09:16:57.805743 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e70ca50-713f-40fd-ac9a-89af89af62ba-config\") pod \"dnsmasq-dns-67cb876dc9-z42kz\" (UID: \"9e70ca50-713f-40fd-ac9a-89af89af62ba\") " pod="openstack/dnsmasq-dns-67cb876dc9-z42kz" Dec 03 09:16:57 crc kubenswrapper[4576]: I1203 09:16:57.805758 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9e70ca50-713f-40fd-ac9a-89af89af62ba-ovsdbserver-nb\") pod \"dnsmasq-dns-67cb876dc9-z42kz\" (UID: \"9e70ca50-713f-40fd-ac9a-89af89af62ba\") " pod="openstack/dnsmasq-dns-67cb876dc9-z42kz" Dec 03 09:16:57 crc kubenswrapper[4576]: I1203 09:16:57.907135 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9e70ca50-713f-40fd-ac9a-89af89af62ba-ovsdbserver-nb\") pod \"dnsmasq-dns-67cb876dc9-z42kz\" (UID: \"9e70ca50-713f-40fd-ac9a-89af89af62ba\") " pod="openstack/dnsmasq-dns-67cb876dc9-z42kz" Dec 03 09:16:57 crc kubenswrapper[4576]: I1203 09:16:57.907479 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e70ca50-713f-40fd-ac9a-89af89af62ba-config\") pod \"dnsmasq-dns-67cb876dc9-z42kz\" (UID: \"9e70ca50-713f-40fd-ac9a-89af89af62ba\") " pod="openstack/dnsmasq-dns-67cb876dc9-z42kz" Dec 03 09:16:57 crc kubenswrapper[4576]: I1203 09:16:57.907590 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9e70ca50-713f-40fd-ac9a-89af89af62ba-dns-swift-storage-0\") pod \"dnsmasq-dns-67cb876dc9-z42kz\" (UID: \"9e70ca50-713f-40fd-ac9a-89af89af62ba\") " pod="openstack/dnsmasq-dns-67cb876dc9-z42kz" Dec 03 09:16:57 crc kubenswrapper[4576]: I1203 09:16:57.907651 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vrplw\" (UniqueName: \"kubernetes.io/projected/9e70ca50-713f-40fd-ac9a-89af89af62ba-kube-api-access-vrplw\") pod \"dnsmasq-dns-67cb876dc9-z42kz\" (UID: \"9e70ca50-713f-40fd-ac9a-89af89af62ba\") " pod="openstack/dnsmasq-dns-67cb876dc9-z42kz" Dec 03 09:16:57 crc kubenswrapper[4576]: I1203 09:16:57.907672 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9e70ca50-713f-40fd-ac9a-89af89af62ba-dns-svc\") pod \"dnsmasq-dns-67cb876dc9-z42kz\" (UID: \"9e70ca50-713f-40fd-ac9a-89af89af62ba\") " pod="openstack/dnsmasq-dns-67cb876dc9-z42kz" Dec 03 09:16:57 crc kubenswrapper[4576]: I1203 09:16:57.907706 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/9e70ca50-713f-40fd-ac9a-89af89af62ba-openstack-edpm-ipam\") pod \"dnsmasq-dns-67cb876dc9-z42kz\" (UID: \"9e70ca50-713f-40fd-ac9a-89af89af62ba\") " pod="openstack/dnsmasq-dns-67cb876dc9-z42kz" Dec 03 09:16:57 crc kubenswrapper[4576]: I1203 09:16:57.907762 4576 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9e70ca50-713f-40fd-ac9a-89af89af62ba-ovsdbserver-sb\") pod \"dnsmasq-dns-67cb876dc9-z42kz\" (UID: \"9e70ca50-713f-40fd-ac9a-89af89af62ba\") " pod="openstack/dnsmasq-dns-67cb876dc9-z42kz" Dec 03 09:16:57 crc kubenswrapper[4576]: I1203 09:16:57.908755 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9e70ca50-713f-40fd-ac9a-89af89af62ba-ovsdbserver-sb\") pod \"dnsmasq-dns-67cb876dc9-z42kz\" (UID: \"9e70ca50-713f-40fd-ac9a-89af89af62ba\") " pod="openstack/dnsmasq-dns-67cb876dc9-z42kz" Dec 03 09:16:57 crc kubenswrapper[4576]: I1203 09:16:57.909103 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9e70ca50-713f-40fd-ac9a-89af89af62ba-dns-swift-storage-0\") pod \"dnsmasq-dns-67cb876dc9-z42kz\" (UID: \"9e70ca50-713f-40fd-ac9a-89af89af62ba\") " pod="openstack/dnsmasq-dns-67cb876dc9-z42kz" Dec 03 09:16:57 crc kubenswrapper[4576]: I1203 09:16:57.909131 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9e70ca50-713f-40fd-ac9a-89af89af62ba-ovsdbserver-nb\") pod \"dnsmasq-dns-67cb876dc9-z42kz\" (UID: \"9e70ca50-713f-40fd-ac9a-89af89af62ba\") " pod="openstack/dnsmasq-dns-67cb876dc9-z42kz" Dec 03 09:16:57 crc kubenswrapper[4576]: I1203 09:16:57.909594 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9e70ca50-713f-40fd-ac9a-89af89af62ba-dns-svc\") pod \"dnsmasq-dns-67cb876dc9-z42kz\" (UID: \"9e70ca50-713f-40fd-ac9a-89af89af62ba\") " pod="openstack/dnsmasq-dns-67cb876dc9-z42kz" Dec 03 09:16:57 crc kubenswrapper[4576]: I1203 09:16:57.909811 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/9e70ca50-713f-40fd-ac9a-89af89af62ba-openstack-edpm-ipam\") pod \"dnsmasq-dns-67cb876dc9-z42kz\" (UID: \"9e70ca50-713f-40fd-ac9a-89af89af62ba\") " pod="openstack/dnsmasq-dns-67cb876dc9-z42kz" Dec 03 09:16:57 crc kubenswrapper[4576]: I1203 09:16:57.909962 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e70ca50-713f-40fd-ac9a-89af89af62ba-config\") pod \"dnsmasq-dns-67cb876dc9-z42kz\" (UID: \"9e70ca50-713f-40fd-ac9a-89af89af62ba\") " pod="openstack/dnsmasq-dns-67cb876dc9-z42kz" Dec 03 09:16:57 crc kubenswrapper[4576]: I1203 09:16:57.934679 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vrplw\" (UniqueName: \"kubernetes.io/projected/9e70ca50-713f-40fd-ac9a-89af89af62ba-kube-api-access-vrplw\") pod \"dnsmasq-dns-67cb876dc9-z42kz\" (UID: \"9e70ca50-713f-40fd-ac9a-89af89af62ba\") " pod="openstack/dnsmasq-dns-67cb876dc9-z42kz" Dec 03 09:16:58 crc kubenswrapper[4576]: I1203 09:16:58.075620 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67cb876dc9-z42kz" Dec 03 09:16:58 crc kubenswrapper[4576]: I1203 09:16:58.190372 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-cd5cbd7b9-zxdsl" Dec 03 09:16:58 crc kubenswrapper[4576]: I1203 09:16:58.315338 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fltnf\" (UniqueName: \"kubernetes.io/projected/031dd78c-4b11-401e-a442-f6824ded6e7d-kube-api-access-fltnf\") pod \"031dd78c-4b11-401e-a442-f6824ded6e7d\" (UID: \"031dd78c-4b11-401e-a442-f6824ded6e7d\") " Dec 03 09:16:58 crc kubenswrapper[4576]: I1203 09:16:58.315402 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/031dd78c-4b11-401e-a442-f6824ded6e7d-dns-swift-storage-0\") pod \"031dd78c-4b11-401e-a442-f6824ded6e7d\" (UID: \"031dd78c-4b11-401e-a442-f6824ded6e7d\") " Dec 03 09:16:58 crc kubenswrapper[4576]: I1203 09:16:58.315601 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/031dd78c-4b11-401e-a442-f6824ded6e7d-dns-svc\") pod \"031dd78c-4b11-401e-a442-f6824ded6e7d\" (UID: \"031dd78c-4b11-401e-a442-f6824ded6e7d\") " Dec 03 09:16:58 crc kubenswrapper[4576]: I1203 09:16:58.316052 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/031dd78c-4b11-401e-a442-f6824ded6e7d-config\") pod \"031dd78c-4b11-401e-a442-f6824ded6e7d\" (UID: \"031dd78c-4b11-401e-a442-f6824ded6e7d\") " Dec 03 09:16:58 crc kubenswrapper[4576]: I1203 09:16:58.316148 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/031dd78c-4b11-401e-a442-f6824ded6e7d-ovsdbserver-sb\") pod \"031dd78c-4b11-401e-a442-f6824ded6e7d\" (UID: \"031dd78c-4b11-401e-a442-f6824ded6e7d\") " Dec 03 09:16:58 crc kubenswrapper[4576]: I1203 09:16:58.316172 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/031dd78c-4b11-401e-a442-f6824ded6e7d-ovsdbserver-nb\") pod \"031dd78c-4b11-401e-a442-f6824ded6e7d\" (UID: \"031dd78c-4b11-401e-a442-f6824ded6e7d\") " Dec 03 09:16:58 crc kubenswrapper[4576]: I1203 09:16:58.323069 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/031dd78c-4b11-401e-a442-f6824ded6e7d-kube-api-access-fltnf" (OuterVolumeSpecName: "kube-api-access-fltnf") pod "031dd78c-4b11-401e-a442-f6824ded6e7d" (UID: "031dd78c-4b11-401e-a442-f6824ded6e7d"). InnerVolumeSpecName "kube-api-access-fltnf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:16:58 crc kubenswrapper[4576]: I1203 09:16:58.383874 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/031dd78c-4b11-401e-a442-f6824ded6e7d-config" (OuterVolumeSpecName: "config") pod "031dd78c-4b11-401e-a442-f6824ded6e7d" (UID: "031dd78c-4b11-401e-a442-f6824ded6e7d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:16:58 crc kubenswrapper[4576]: I1203 09:16:58.408509 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/031dd78c-4b11-401e-a442-f6824ded6e7d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "031dd78c-4b11-401e-a442-f6824ded6e7d" (UID: "031dd78c-4b11-401e-a442-f6824ded6e7d"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:16:58 crc kubenswrapper[4576]: I1203 09:16:58.409356 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/031dd78c-4b11-401e-a442-f6824ded6e7d-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "031dd78c-4b11-401e-a442-f6824ded6e7d" (UID: "031dd78c-4b11-401e-a442-f6824ded6e7d"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:16:58 crc kubenswrapper[4576]: I1203 09:16:58.418951 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/031dd78c-4b11-401e-a442-f6824ded6e7d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "031dd78c-4b11-401e-a442-f6824ded6e7d" (UID: "031dd78c-4b11-401e-a442-f6824ded6e7d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:16:58 crc kubenswrapper[4576]: I1203 09:16:58.419947 4576 generic.go:334] "Generic (PLEG): container finished" podID="031dd78c-4b11-401e-a442-f6824ded6e7d" containerID="8f6969c402b9a2f68d4ea1b41fdfe670e48b0b8a30deabddff51eb816e63a355" exitCode=0 Dec 03 09:16:58 crc kubenswrapper[4576]: I1203 09:16:58.420002 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cd5cbd7b9-zxdsl" Dec 03 09:16:58 crc kubenswrapper[4576]: I1203 09:16:58.420001 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd5cbd7b9-zxdsl" event={"ID":"031dd78c-4b11-401e-a442-f6824ded6e7d","Type":"ContainerDied","Data":"8f6969c402b9a2f68d4ea1b41fdfe670e48b0b8a30deabddff51eb816e63a355"} Dec 03 09:16:58 crc kubenswrapper[4576]: I1203 09:16:58.420731 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd5cbd7b9-zxdsl" event={"ID":"031dd78c-4b11-401e-a442-f6824ded6e7d","Type":"ContainerDied","Data":"1e417014bb984973852fd2eb9516f58914a59c0b0f0dff4d55776cd5d7376095"} Dec 03 09:16:58 crc kubenswrapper[4576]: I1203 09:16:58.420762 4576 scope.go:117] "RemoveContainer" containerID="8f6969c402b9a2f68d4ea1b41fdfe670e48b0b8a30deabddff51eb816e63a355" Dec 03 09:16:58 crc kubenswrapper[4576]: I1203 09:16:58.421223 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/031dd78c-4b11-401e-a442-f6824ded6e7d-dns-svc\") pod \"031dd78c-4b11-401e-a442-f6824ded6e7d\" (UID: \"031dd78c-4b11-401e-a442-f6824ded6e7d\") " Dec 03 09:16:58 crc kubenswrapper[4576]: W1203 09:16:58.421359 4576 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/031dd78c-4b11-401e-a442-f6824ded6e7d/volumes/kubernetes.io~configmap/dns-svc Dec 03 09:16:58 crc kubenswrapper[4576]: I1203 09:16:58.421804 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/031dd78c-4b11-401e-a442-f6824ded6e7d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "031dd78c-4b11-401e-a442-f6824ded6e7d" (UID: "031dd78c-4b11-401e-a442-f6824ded6e7d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:16:58 crc kubenswrapper[4576]: I1203 09:16:58.424274 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/031dd78c-4b11-401e-a442-f6824ded6e7d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "031dd78c-4b11-401e-a442-f6824ded6e7d" (UID: "031dd78c-4b11-401e-a442-f6824ded6e7d"). 
InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:16:58 crc kubenswrapper[4576]: I1203 09:16:58.427330 4576 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/031dd78c-4b11-401e-a442-f6824ded6e7d-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 09:16:58 crc kubenswrapper[4576]: I1203 09:16:58.427672 4576 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/031dd78c-4b11-401e-a442-f6824ded6e7d-config\") on node \"crc\" DevicePath \"\"" Dec 03 09:16:58 crc kubenswrapper[4576]: I1203 09:16:58.427787 4576 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/031dd78c-4b11-401e-a442-f6824ded6e7d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 09:16:58 crc kubenswrapper[4576]: I1203 09:16:58.427870 4576 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/031dd78c-4b11-401e-a442-f6824ded6e7d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 09:16:58 crc kubenswrapper[4576]: I1203 09:16:58.427952 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fltnf\" (UniqueName: \"kubernetes.io/projected/031dd78c-4b11-401e-a442-f6824ded6e7d-kube-api-access-fltnf\") on node \"crc\" DevicePath \"\"" Dec 03 09:16:58 crc kubenswrapper[4576]: I1203 09:16:58.428022 4576 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/031dd78c-4b11-401e-a442-f6824ded6e7d-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 09:16:58 crc kubenswrapper[4576]: I1203 09:16:58.466695 4576 scope.go:117] "RemoveContainer" containerID="9f96812c8497dd2eb80d2673835a1cb099b0e6c3904684832fa9eab124a79b5d" Dec 03 09:16:58 crc kubenswrapper[4576]: I1203 09:16:58.488934 4576 scope.go:117] "RemoveContainer" containerID="8f6969c402b9a2f68d4ea1b41fdfe670e48b0b8a30deabddff51eb816e63a355" Dec 03 09:16:58 crc kubenswrapper[4576]: E1203 09:16:58.489897 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f6969c402b9a2f68d4ea1b41fdfe670e48b0b8a30deabddff51eb816e63a355\": container with ID starting with 8f6969c402b9a2f68d4ea1b41fdfe670e48b0b8a30deabddff51eb816e63a355 not found: ID does not exist" containerID="8f6969c402b9a2f68d4ea1b41fdfe670e48b0b8a30deabddff51eb816e63a355" Dec 03 09:16:58 crc kubenswrapper[4576]: I1203 09:16:58.489942 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f6969c402b9a2f68d4ea1b41fdfe670e48b0b8a30deabddff51eb816e63a355"} err="failed to get container status \"8f6969c402b9a2f68d4ea1b41fdfe670e48b0b8a30deabddff51eb816e63a355\": rpc error: code = NotFound desc = could not find container \"8f6969c402b9a2f68d4ea1b41fdfe670e48b0b8a30deabddff51eb816e63a355\": container with ID starting with 8f6969c402b9a2f68d4ea1b41fdfe670e48b0b8a30deabddff51eb816e63a355 not found: ID does not exist" Dec 03 09:16:58 crc kubenswrapper[4576]: I1203 09:16:58.489972 4576 scope.go:117] "RemoveContainer" containerID="9f96812c8497dd2eb80d2673835a1cb099b0e6c3904684832fa9eab124a79b5d" Dec 03 09:16:58 crc kubenswrapper[4576]: E1203 09:16:58.490371 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f96812c8497dd2eb80d2673835a1cb099b0e6c3904684832fa9eab124a79b5d\": 
container with ID starting with 9f96812c8497dd2eb80d2673835a1cb099b0e6c3904684832fa9eab124a79b5d not found: ID does not exist" containerID="9f96812c8497dd2eb80d2673835a1cb099b0e6c3904684832fa9eab124a79b5d" Dec 03 09:16:58 crc kubenswrapper[4576]: I1203 09:16:58.490417 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f96812c8497dd2eb80d2673835a1cb099b0e6c3904684832fa9eab124a79b5d"} err="failed to get container status \"9f96812c8497dd2eb80d2673835a1cb099b0e6c3904684832fa9eab124a79b5d\": rpc error: code = NotFound desc = could not find container \"9f96812c8497dd2eb80d2673835a1cb099b0e6c3904684832fa9eab124a79b5d\": container with ID starting with 9f96812c8497dd2eb80d2673835a1cb099b0e6c3904684832fa9eab124a79b5d not found: ID does not exist" Dec 03 09:16:58 crc kubenswrapper[4576]: I1203 09:16:58.620244 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67cb876dc9-z42kz"] Dec 03 09:16:58 crc kubenswrapper[4576]: W1203 09:16:58.626080 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9e70ca50_713f_40fd_ac9a_89af89af62ba.slice/crio-a9ee033ec6a7510c430671850acacf42dd5e96f198f54c797f6287248e594e7b WatchSource:0}: Error finding container a9ee033ec6a7510c430671850acacf42dd5e96f198f54c797f6287248e594e7b: Status 404 returned error can't find the container with id a9ee033ec6a7510c430671850acacf42dd5e96f198f54c797f6287248e594e7b Dec 03 09:16:58 crc kubenswrapper[4576]: I1203 09:16:58.868608 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-zxdsl"] Dec 03 09:16:58 crc kubenswrapper[4576]: I1203 09:16:58.885828 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-zxdsl"] Dec 03 09:16:59 crc kubenswrapper[4576]: I1203 09:16:59.442747 4576 generic.go:334] "Generic (PLEG): container finished" podID="9e70ca50-713f-40fd-ac9a-89af89af62ba" containerID="77c8c1161a7ad1768874b305b7c10af31dccf85d6037a75340c3988251c4b5d1" exitCode=0 Dec 03 09:16:59 crc kubenswrapper[4576]: I1203 09:16:59.443752 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67cb876dc9-z42kz" event={"ID":"9e70ca50-713f-40fd-ac9a-89af89af62ba","Type":"ContainerDied","Data":"77c8c1161a7ad1768874b305b7c10af31dccf85d6037a75340c3988251c4b5d1"} Dec 03 09:16:59 crc kubenswrapper[4576]: I1203 09:16:59.444656 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67cb876dc9-z42kz" event={"ID":"9e70ca50-713f-40fd-ac9a-89af89af62ba","Type":"ContainerStarted","Data":"a9ee033ec6a7510c430671850acacf42dd5e96f198f54c797f6287248e594e7b"} Dec 03 09:16:59 crc kubenswrapper[4576]: I1203 09:16:59.691194 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="031dd78c-4b11-401e-a442-f6824ded6e7d" path="/var/lib/kubelet/pods/031dd78c-4b11-401e-a442-f6824ded6e7d/volumes" Dec 03 09:17:00 crc kubenswrapper[4576]: I1203 09:17:00.462304 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67cb876dc9-z42kz" event={"ID":"9e70ca50-713f-40fd-ac9a-89af89af62ba","Type":"ContainerStarted","Data":"80eb0ae1509e1805c233db4ea8a718ce5d99547bfa67f67b42a04b1fa703eea6"} Dec 03 09:17:00 crc kubenswrapper[4576]: I1203 09:17:00.463703 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-67cb876dc9-z42kz" Dec 03 09:17:00 crc kubenswrapper[4576]: I1203 09:17:00.489584 4576 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-67cb876dc9-z42kz" podStartSLOduration=3.48956467 podStartE2EDuration="3.48956467s" podCreationTimestamp="2025-12-03 09:16:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:17:00.48139931 +0000 UTC m=+2227.867376294" watchObservedRunningTime="2025-12-03 09:17:00.48956467 +0000 UTC m=+2227.875541654" Dec 03 09:17:08 crc kubenswrapper[4576]: I1203 09:17:08.077942 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-67cb876dc9-z42kz" Dec 03 09:17:08 crc kubenswrapper[4576]: I1203 09:17:08.184475 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d558885bc-8k79z"] Dec 03 09:17:08 crc kubenswrapper[4576]: I1203 09:17:08.184756 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-d558885bc-8k79z" podUID="4d60da8a-0bd3-449c-a111-150182756bc6" containerName="dnsmasq-dns" containerID="cri-o://e9c579a1bc9a19fdea89b72c154b492609181055870da45ba8a0c496116c447b" gracePeriod=10 Dec 03 09:17:08 crc kubenswrapper[4576]: I1203 09:17:08.560160 4576 generic.go:334] "Generic (PLEG): container finished" podID="4d60da8a-0bd3-449c-a111-150182756bc6" containerID="e9c579a1bc9a19fdea89b72c154b492609181055870da45ba8a0c496116c447b" exitCode=0 Dec 03 09:17:08 crc kubenswrapper[4576]: I1203 09:17:08.560408 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d558885bc-8k79z" event={"ID":"4d60da8a-0bd3-449c-a111-150182756bc6","Type":"ContainerDied","Data":"e9c579a1bc9a19fdea89b72c154b492609181055870da45ba8a0c496116c447b"} Dec 03 09:17:08 crc kubenswrapper[4576]: I1203 09:17:08.736568 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-d558885bc-8k79z" Dec 03 09:17:08 crc kubenswrapper[4576]: I1203 09:17:08.878282 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4d60da8a-0bd3-449c-a111-150182756bc6-config\") pod \"4d60da8a-0bd3-449c-a111-150182756bc6\" (UID: \"4d60da8a-0bd3-449c-a111-150182756bc6\") " Dec 03 09:17:08 crc kubenswrapper[4576]: I1203 09:17:08.878583 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4d60da8a-0bd3-449c-a111-150182756bc6-dns-swift-storage-0\") pod \"4d60da8a-0bd3-449c-a111-150182756bc6\" (UID: \"4d60da8a-0bd3-449c-a111-150182756bc6\") " Dec 03 09:17:08 crc kubenswrapper[4576]: I1203 09:17:08.878643 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4d60da8a-0bd3-449c-a111-150182756bc6-ovsdbserver-nb\") pod \"4d60da8a-0bd3-449c-a111-150182756bc6\" (UID: \"4d60da8a-0bd3-449c-a111-150182756bc6\") " Dec 03 09:17:08 crc kubenswrapper[4576]: I1203 09:17:08.878772 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rx8kp\" (UniqueName: \"kubernetes.io/projected/4d60da8a-0bd3-449c-a111-150182756bc6-kube-api-access-rx8kp\") pod \"4d60da8a-0bd3-449c-a111-150182756bc6\" (UID: \"4d60da8a-0bd3-449c-a111-150182756bc6\") " Dec 03 09:17:08 crc kubenswrapper[4576]: I1203 09:17:08.878827 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4d60da8a-0bd3-449c-a111-150182756bc6-dns-svc\") pod \"4d60da8a-0bd3-449c-a111-150182756bc6\" (UID: \"4d60da8a-0bd3-449c-a111-150182756bc6\") " Dec 03 09:17:08 crc kubenswrapper[4576]: I1203 09:17:08.878845 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4d60da8a-0bd3-449c-a111-150182756bc6-ovsdbserver-sb\") pod \"4d60da8a-0bd3-449c-a111-150182756bc6\" (UID: \"4d60da8a-0bd3-449c-a111-150182756bc6\") " Dec 03 09:17:08 crc kubenswrapper[4576]: I1203 09:17:08.878886 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/4d60da8a-0bd3-449c-a111-150182756bc6-openstack-edpm-ipam\") pod \"4d60da8a-0bd3-449c-a111-150182756bc6\" (UID: \"4d60da8a-0bd3-449c-a111-150182756bc6\") " Dec 03 09:17:08 crc kubenswrapper[4576]: I1203 09:17:08.884430 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d60da8a-0bd3-449c-a111-150182756bc6-kube-api-access-rx8kp" (OuterVolumeSpecName: "kube-api-access-rx8kp") pod "4d60da8a-0bd3-449c-a111-150182756bc6" (UID: "4d60da8a-0bd3-449c-a111-150182756bc6"). InnerVolumeSpecName "kube-api-access-rx8kp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:17:08 crc kubenswrapper[4576]: I1203 09:17:08.935822 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4d60da8a-0bd3-449c-a111-150182756bc6-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "4d60da8a-0bd3-449c-a111-150182756bc6" (UID: "4d60da8a-0bd3-449c-a111-150182756bc6"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:17:08 crc kubenswrapper[4576]: I1203 09:17:08.937115 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4d60da8a-0bd3-449c-a111-150182756bc6-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "4d60da8a-0bd3-449c-a111-150182756bc6" (UID: "4d60da8a-0bd3-449c-a111-150182756bc6"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:17:08 crc kubenswrapper[4576]: I1203 09:17:08.947432 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4d60da8a-0bd3-449c-a111-150182756bc6-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "4d60da8a-0bd3-449c-a111-150182756bc6" (UID: "4d60da8a-0bd3-449c-a111-150182756bc6"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:17:08 crc kubenswrapper[4576]: I1203 09:17:08.955830 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4d60da8a-0bd3-449c-a111-150182756bc6-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "4d60da8a-0bd3-449c-a111-150182756bc6" (UID: "4d60da8a-0bd3-449c-a111-150182756bc6"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:17:08 crc kubenswrapper[4576]: I1203 09:17:08.959143 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4d60da8a-0bd3-449c-a111-150182756bc6-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4d60da8a-0bd3-449c-a111-150182756bc6" (UID: "4d60da8a-0bd3-449c-a111-150182756bc6"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:17:08 crc kubenswrapper[4576]: I1203 09:17:08.967780 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4d60da8a-0bd3-449c-a111-150182756bc6-config" (OuterVolumeSpecName: "config") pod "4d60da8a-0bd3-449c-a111-150182756bc6" (UID: "4d60da8a-0bd3-449c-a111-150182756bc6"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:17:08 crc kubenswrapper[4576]: I1203 09:17:08.981091 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rx8kp\" (UniqueName: \"kubernetes.io/projected/4d60da8a-0bd3-449c-a111-150182756bc6-kube-api-access-rx8kp\") on node \"crc\" DevicePath \"\"" Dec 03 09:17:08 crc kubenswrapper[4576]: I1203 09:17:08.981121 4576 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4d60da8a-0bd3-449c-a111-150182756bc6-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 09:17:08 crc kubenswrapper[4576]: I1203 09:17:08.981130 4576 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4d60da8a-0bd3-449c-a111-150182756bc6-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 09:17:08 crc kubenswrapper[4576]: I1203 09:17:08.981139 4576 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/4d60da8a-0bd3-449c-a111-150182756bc6-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 03 09:17:08 crc kubenswrapper[4576]: I1203 09:17:08.981147 4576 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4d60da8a-0bd3-449c-a111-150182756bc6-config\") on node \"crc\" DevicePath \"\"" Dec 03 09:17:08 crc kubenswrapper[4576]: I1203 09:17:08.981155 4576 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4d60da8a-0bd3-449c-a111-150182756bc6-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 09:17:08 crc kubenswrapper[4576]: I1203 09:17:08.981163 4576 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4d60da8a-0bd3-449c-a111-150182756bc6-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 09:17:09 crc kubenswrapper[4576]: I1203 09:17:09.614430 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d558885bc-8k79z" event={"ID":"4d60da8a-0bd3-449c-a111-150182756bc6","Type":"ContainerDied","Data":"25f292b624309b0cc79186b60b10f4cb411e289e8af7310d7bf7015e0cab628a"} Dec 03 09:17:09 crc kubenswrapper[4576]: I1203 09:17:09.614490 4576 scope.go:117] "RemoveContainer" containerID="e9c579a1bc9a19fdea89b72c154b492609181055870da45ba8a0c496116c447b" Dec 03 09:17:09 crc kubenswrapper[4576]: I1203 09:17:09.614712 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-d558885bc-8k79z" Dec 03 09:17:09 crc kubenswrapper[4576]: I1203 09:17:09.640783 4576 scope.go:117] "RemoveContainer" containerID="731ceca16766cf59999f4c5c8d15e8d84fb4a951f376c6902970a613f948219d" Dec 03 09:17:09 crc kubenswrapper[4576]: I1203 09:17:09.666872 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d558885bc-8k79z"] Dec 03 09:17:09 crc kubenswrapper[4576]: I1203 09:17:09.675833 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-d558885bc-8k79z"] Dec 03 09:17:09 crc kubenswrapper[4576]: I1203 09:17:09.680460 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 09:17:09 crc kubenswrapper[4576]: I1203 09:17:09.680500 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 09:17:09 crc kubenswrapper[4576]: I1203 09:17:09.688422 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d60da8a-0bd3-449c-a111-150182756bc6" path="/var/lib/kubelet/pods/4d60da8a-0bd3-449c-a111-150182756bc6/volumes" Dec 03 09:17:20 crc kubenswrapper[4576]: I1203 09:17:20.719089 4576 generic.go:334] "Generic (PLEG): container finished" podID="d0c9f8c4-e8c5-4033-ac50-305178e9010f" containerID="0fca0d995f334335608cd5d4335c4fad55e9813419cf0f7308992b45ec7fa0ff" exitCode=0 Dec 03 09:17:20 crc kubenswrapper[4576]: I1203 09:17:20.719185 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d0c9f8c4-e8c5-4033-ac50-305178e9010f","Type":"ContainerDied","Data":"0fca0d995f334335608cd5d4335c4fad55e9813419cf0f7308992b45ec7fa0ff"} Dec 03 09:17:20 crc kubenswrapper[4576]: I1203 09:17:20.722402 4576 generic.go:334] "Generic (PLEG): container finished" podID="fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9" containerID="76c7a885f4a3a06bac229a230e47ae0919ce9e4b727545d517e7dab941aad422" exitCode=0 Dec 03 09:17:20 crc kubenswrapper[4576]: I1203 09:17:20.722446 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9","Type":"ContainerDied","Data":"76c7a885f4a3a06bac229a230e47ae0919ce9e4b727545d517e7dab941aad422"} Dec 03 09:17:21 crc kubenswrapper[4576]: I1203 09:17:21.734516 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d0c9f8c4-e8c5-4033-ac50-305178e9010f","Type":"ContainerStarted","Data":"ddecc4630df0a6286fbefefdee3fca16e0eb05dc24d09828b3738395f65114e3"} Dec 03 09:17:21 crc kubenswrapper[4576]: I1203 09:17:21.735125 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:17:21 crc kubenswrapper[4576]: I1203 09:17:21.739166 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9","Type":"ContainerStarted","Data":"704cb63c4ac2e4b4f7b1fb1f77c4224fd321064115b34fb141e5f16dc28bf96a"} Dec 03 09:17:21 crc kubenswrapper[4576]: I1203 09:17:21.739845 4576 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 03 09:17:21 crc kubenswrapper[4576]: I1203 09:17:21.765981 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=36.765961884 podStartE2EDuration="36.765961884s" podCreationTimestamp="2025-12-03 09:16:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:17:21.756900369 +0000 UTC m=+2249.142877373" watchObservedRunningTime="2025-12-03 09:17:21.765961884 +0000 UTC m=+2249.151938868" Dec 03 09:17:21 crc kubenswrapper[4576]: I1203 09:17:21.788784 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=35.788761791 podStartE2EDuration="35.788761791s" podCreationTimestamp="2025-12-03 09:16:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:17:21.78283665 +0000 UTC m=+2249.168813634" watchObservedRunningTime="2025-12-03 09:17:21.788761791 +0000 UTC m=+2249.174738775" Dec 03 09:17:26 crc kubenswrapper[4576]: I1203 09:17:26.629471 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-86chp"] Dec 03 09:17:26 crc kubenswrapper[4576]: E1203 09:17:26.630477 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d60da8a-0bd3-449c-a111-150182756bc6" containerName="init" Dec 03 09:17:26 crc kubenswrapper[4576]: I1203 09:17:26.630491 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d60da8a-0bd3-449c-a111-150182756bc6" containerName="init" Dec 03 09:17:26 crc kubenswrapper[4576]: E1203 09:17:26.630542 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d60da8a-0bd3-449c-a111-150182756bc6" containerName="dnsmasq-dns" Dec 03 09:17:26 crc kubenswrapper[4576]: I1203 09:17:26.630548 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d60da8a-0bd3-449c-a111-150182756bc6" containerName="dnsmasq-dns" Dec 03 09:17:26 crc kubenswrapper[4576]: E1203 09:17:26.630558 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="031dd78c-4b11-401e-a442-f6824ded6e7d" containerName="init" Dec 03 09:17:26 crc kubenswrapper[4576]: I1203 09:17:26.630564 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="031dd78c-4b11-401e-a442-f6824ded6e7d" containerName="init" Dec 03 09:17:26 crc kubenswrapper[4576]: E1203 09:17:26.630576 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="031dd78c-4b11-401e-a442-f6824ded6e7d" containerName="dnsmasq-dns" Dec 03 09:17:26 crc kubenswrapper[4576]: I1203 09:17:26.630583 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="031dd78c-4b11-401e-a442-f6824ded6e7d" containerName="dnsmasq-dns" Dec 03 09:17:26 crc kubenswrapper[4576]: I1203 09:17:26.630768 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="031dd78c-4b11-401e-a442-f6824ded6e7d" containerName="dnsmasq-dns" Dec 03 09:17:26 crc kubenswrapper[4576]: I1203 09:17:26.630796 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d60da8a-0bd3-449c-a111-150182756bc6" containerName="dnsmasq-dns" Dec 03 09:17:26 crc kubenswrapper[4576]: I1203 09:17:26.631395 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-86chp" Dec 03 09:17:26 crc kubenswrapper[4576]: I1203 09:17:26.634819 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 09:17:26 crc kubenswrapper[4576]: I1203 09:17:26.637236 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 09:17:26 crc kubenswrapper[4576]: I1203 09:17:26.637260 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-sl8dp" Dec 03 09:17:26 crc kubenswrapper[4576]: I1203 09:17:26.637260 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 09:17:26 crc kubenswrapper[4576]: I1203 09:17:26.644197 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-86chp"] Dec 03 09:17:26 crc kubenswrapper[4576]: I1203 09:17:26.669132 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k7txm\" (UniqueName: \"kubernetes.io/projected/fd4aa481-7064-4ebc-bc06-d706d427260d-kube-api-access-k7txm\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-86chp\" (UID: \"fd4aa481-7064-4ebc-bc06-d706d427260d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-86chp" Dec 03 09:17:26 crc kubenswrapper[4576]: I1203 09:17:26.669414 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fd4aa481-7064-4ebc-bc06-d706d427260d-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-86chp\" (UID: \"fd4aa481-7064-4ebc-bc06-d706d427260d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-86chp" Dec 03 09:17:26 crc kubenswrapper[4576]: I1203 09:17:26.669496 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fd4aa481-7064-4ebc-bc06-d706d427260d-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-86chp\" (UID: \"fd4aa481-7064-4ebc-bc06-d706d427260d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-86chp" Dec 03 09:17:26 crc kubenswrapper[4576]: I1203 09:17:26.669795 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd4aa481-7064-4ebc-bc06-d706d427260d-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-86chp\" (UID: \"fd4aa481-7064-4ebc-bc06-d706d427260d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-86chp" Dec 03 09:17:26 crc kubenswrapper[4576]: I1203 09:17:26.772681 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7txm\" (UniqueName: \"kubernetes.io/projected/fd4aa481-7064-4ebc-bc06-d706d427260d-kube-api-access-k7txm\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-86chp\" (UID: \"fd4aa481-7064-4ebc-bc06-d706d427260d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-86chp" Dec 03 09:17:26 crc kubenswrapper[4576]: I1203 09:17:26.772773 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fd4aa481-7064-4ebc-bc06-d706d427260d-inventory\") pod 
\"repo-setup-edpm-deployment-openstack-edpm-ipam-86chp\" (UID: \"fd4aa481-7064-4ebc-bc06-d706d427260d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-86chp" Dec 03 09:17:26 crc kubenswrapper[4576]: I1203 09:17:26.772826 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fd4aa481-7064-4ebc-bc06-d706d427260d-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-86chp\" (UID: \"fd4aa481-7064-4ebc-bc06-d706d427260d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-86chp" Dec 03 09:17:26 crc kubenswrapper[4576]: I1203 09:17:26.772911 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd4aa481-7064-4ebc-bc06-d706d427260d-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-86chp\" (UID: \"fd4aa481-7064-4ebc-bc06-d706d427260d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-86chp" Dec 03 09:17:26 crc kubenswrapper[4576]: I1203 09:17:26.790615 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fd4aa481-7064-4ebc-bc06-d706d427260d-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-86chp\" (UID: \"fd4aa481-7064-4ebc-bc06-d706d427260d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-86chp" Dec 03 09:17:26 crc kubenswrapper[4576]: I1203 09:17:26.790616 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fd4aa481-7064-4ebc-bc06-d706d427260d-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-86chp\" (UID: \"fd4aa481-7064-4ebc-bc06-d706d427260d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-86chp" Dec 03 09:17:26 crc kubenswrapper[4576]: I1203 09:17:26.794167 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd4aa481-7064-4ebc-bc06-d706d427260d-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-86chp\" (UID: \"fd4aa481-7064-4ebc-bc06-d706d427260d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-86chp" Dec 03 09:17:26 crc kubenswrapper[4576]: I1203 09:17:26.795751 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k7txm\" (UniqueName: \"kubernetes.io/projected/fd4aa481-7064-4ebc-bc06-d706d427260d-kube-api-access-k7txm\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-86chp\" (UID: \"fd4aa481-7064-4ebc-bc06-d706d427260d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-86chp" Dec 03 09:17:26 crc kubenswrapper[4576]: I1203 09:17:26.968873 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-86chp" Dec 03 09:17:27 crc kubenswrapper[4576]: I1203 09:17:27.606592 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-86chp"] Dec 03 09:17:27 crc kubenswrapper[4576]: I1203 09:17:27.789103 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-86chp" event={"ID":"fd4aa481-7064-4ebc-bc06-d706d427260d","Type":"ContainerStarted","Data":"533c8b0583a3c1029ff3570d7438eef0555dca8ade9b90d4fff1ca4fabaa77a8"} Dec 03 09:17:29 crc kubenswrapper[4576]: I1203 09:17:29.829244 4576 scope.go:117] "RemoveContainer" containerID="2d1f5c2ea88e80bf4769b167f32890e7eaa3326329a0fbbdc18f45d4c7f81136" Dec 03 09:17:36 crc kubenswrapper[4576]: I1203 09:17:36.130711 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 03 09:17:36 crc kubenswrapper[4576]: I1203 09:17:36.666736 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 03 09:17:39 crc kubenswrapper[4576]: I1203 09:17:39.684444 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 09:17:39 crc kubenswrapper[4576]: I1203 09:17:39.684869 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 09:17:44 crc kubenswrapper[4576]: E1203 09:17:44.997555 4576 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/openstack-k8s-operators/openstack-ansibleee-runner:2a7f259a231c6621a5da5d4f23d0c783050b066a" Dec 03 09:17:44 crc kubenswrapper[4576]: E1203 09:17:44.998059 4576 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/openstack-k8s-operators/openstack-ansibleee-runner:2a7f259a231c6621a5da5d4f23d0c783050b066a" Dec 03 09:17:44 crc kubenswrapper[4576]: E1203 09:17:44.998229 4576 kuberuntime_manager.go:1274] "Unhandled Error" err=< Dec 03 09:17:44 crc kubenswrapper[4576]: container &Container{Name:repo-setup-edpm-deployment-openstack-edpm-ipam,Image:quay.rdoproject.org/openstack-k8s-operators/openstack-ansibleee-runner:2a7f259a231c6621a5da5d4f23d0c783050b066a,Command:[],Args:[ansible-runner run /runner -p playbook.yaml -i repo-setup-edpm-deployment-openstack-edpm-ipam],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:ANSIBLE_VERBOSITY,Value:2,ValueFrom:nil,},EnvVar{Name:RUNNER_PLAYBOOK,Value: Dec 03 09:17:44 crc kubenswrapper[4576]: - hosts: all Dec 03 09:17:44 crc kubenswrapper[4576]: strategy: linear Dec 03 09:17:44 crc kubenswrapper[4576]: tasks: Dec 03 09:17:44 crc kubenswrapper[4576]: - name: Enable podified-repos Dec 03 09:17:44 crc kubenswrapper[4576]: become: true Dec 03 09:17:44 crc kubenswrapper[4576]: ansible.builtin.shell: | Dec 03 09:17:44 crc kubenswrapper[4576]: set -euxo pipefail Dec 03 09:17:44 crc 
kubenswrapper[4576]: pushd /var/tmp Dec 03 09:17:44 crc kubenswrapper[4576]: curl -sL https://github.com/openstack-k8s-operators/repo-setup/archive/refs/heads/main.tar.gz | tar -xz Dec 03 09:17:44 crc kubenswrapper[4576]: pushd repo-setup-main Dec 03 09:17:44 crc kubenswrapper[4576]: python3 -m venv ./venv Dec 03 09:17:44 crc kubenswrapper[4576]: PBR_VERSION=0.0.0 ./venv/bin/pip install ./ Dec 03 09:17:44 crc kubenswrapper[4576]: ./venv/bin/repo-setup current-podified -b antelope Dec 03 09:17:44 crc kubenswrapper[4576]: popd Dec 03 09:17:44 crc kubenswrapper[4576]: rm -rf repo-setup-main Dec 03 09:17:44 crc kubenswrapper[4576]: Dec 03 09:17:44 crc kubenswrapper[4576]: Dec 03 09:17:44 crc kubenswrapper[4576]: ,ValueFrom:nil,},EnvVar{Name:RUNNER_EXTRA_VARS,Value: Dec 03 09:17:44 crc kubenswrapper[4576]: edpm_override_hosts: openstack-edpm-ipam Dec 03 09:17:44 crc kubenswrapper[4576]: edpm_service_type: repo-setup Dec 03 09:17:44 crc kubenswrapper[4576]: Dec 03 09:17:44 crc kubenswrapper[4576]: Dec 03 09:17:44 crc kubenswrapper[4576]: ,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:repo-setup-combined-ca-bundle,ReadOnly:false,MountPath:/var/lib/openstack/cacerts/repo-setup,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/runner/env/ssh_key,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:inventory,ReadOnly:false,MountPath:/runner/inventory/hosts,SubPath:inventory,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-k7txm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:openstack-aee-default-env,},Optional:*true,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod repo-setup-edpm-deployment-openstack-edpm-ipam-86chp_openstack(fd4aa481-7064-4ebc-bc06-d706d427260d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled Dec 03 09:17:44 crc kubenswrapper[4576]: > logger="UnhandledError" Dec 03 09:17:44 crc kubenswrapper[4576]: E1203 09:17:44.999832 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"repo-setup-edpm-deployment-openstack-edpm-ipam\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-86chp" podUID="fd4aa481-7064-4ebc-bc06-d706d427260d" Dec 03 09:17:46 crc kubenswrapper[4576]: E1203 09:17:46.000231 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"repo-setup-edpm-deployment-openstack-edpm-ipam\" with ImagePullBackOff: 
\"Back-off pulling image \\\"quay.rdoproject.org/openstack-k8s-operators/openstack-ansibleee-runner:2a7f259a231c6621a5da5d4f23d0c783050b066a\\\"\"" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-86chp" podUID="fd4aa481-7064-4ebc-bc06-d706d427260d" Dec 03 09:18:00 crc kubenswrapper[4576]: I1203 09:18:00.874004 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 09:18:02 crc kubenswrapper[4576]: I1203 09:18:02.160735 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-86chp" event={"ID":"fd4aa481-7064-4ebc-bc06-d706d427260d","Type":"ContainerStarted","Data":"77735455f497ec6d7c33db167e32f0d7569eb600f8a9340cbc406e074ad135b4"} Dec 03 09:18:02 crc kubenswrapper[4576]: I1203 09:18:02.230030 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-86chp" podStartSLOduration=2.975382047 podStartE2EDuration="36.229998041s" podCreationTimestamp="2025-12-03 09:17:26 +0000 UTC" firstStartedPulling="2025-12-03 09:17:27.616377004 +0000 UTC m=+2255.002353988" lastFinishedPulling="2025-12-03 09:18:00.870992998 +0000 UTC m=+2288.256969982" observedRunningTime="2025-12-03 09:18:02.219246341 +0000 UTC m=+2289.605223345" watchObservedRunningTime="2025-12-03 09:18:02.229998041 +0000 UTC m=+2289.615975025" Dec 03 09:18:09 crc kubenswrapper[4576]: I1203 09:18:09.680941 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 09:18:09 crc kubenswrapper[4576]: I1203 09:18:09.681483 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 09:18:09 crc kubenswrapper[4576]: I1203 09:18:09.707657 4576 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" Dec 03 09:18:09 crc kubenswrapper[4576]: I1203 09:18:09.712297 4576 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6a946ea24cd0ff2991d3760b68d68a973a0657937c42a4e51fa62809d15fc324"} pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 09:18:09 crc kubenswrapper[4576]: I1203 09:18:09.712414 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" containerID="cri-o://6a946ea24cd0ff2991d3760b68d68a973a0657937c42a4e51fa62809d15fc324" gracePeriod=600 Dec 03 09:18:09 crc kubenswrapper[4576]: E1203 09:18:09.838287 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:18:10 crc kubenswrapper[4576]: I1203 09:18:10.249801 4576 generic.go:334] "Generic (PLEG): container finished" podID="60b1bede-26e9-4b5d-b450-9866da685693" containerID="6a946ea24cd0ff2991d3760b68d68a973a0657937c42a4e51fa62809d15fc324" exitCode=0 Dec 03 09:18:10 crc kubenswrapper[4576]: I1203 09:18:10.249860 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" event={"ID":"60b1bede-26e9-4b5d-b450-9866da685693","Type":"ContainerDied","Data":"6a946ea24cd0ff2991d3760b68d68a973a0657937c42a4e51fa62809d15fc324"} Dec 03 09:18:10 crc kubenswrapper[4576]: I1203 09:18:10.249910 4576 scope.go:117] "RemoveContainer" containerID="3dfdd9ffa1395b330c78c278dbb37d60033302cad8d06ba1b081f68d7feaaefc" Dec 03 09:18:10 crc kubenswrapper[4576]: I1203 09:18:10.250972 4576 scope.go:117] "RemoveContainer" containerID="6a946ea24cd0ff2991d3760b68d68a973a0657937c42a4e51fa62809d15fc324" Dec 03 09:18:10 crc kubenswrapper[4576]: E1203 09:18:10.251492 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:18:25 crc kubenswrapper[4576]: I1203 09:18:25.395389 4576 generic.go:334] "Generic (PLEG): container finished" podID="fd4aa481-7064-4ebc-bc06-d706d427260d" containerID="77735455f497ec6d7c33db167e32f0d7569eb600f8a9340cbc406e074ad135b4" exitCode=0 Dec 03 09:18:25 crc kubenswrapper[4576]: I1203 09:18:25.395555 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-86chp" event={"ID":"fd4aa481-7064-4ebc-bc06-d706d427260d","Type":"ContainerDied","Data":"77735455f497ec6d7c33db167e32f0d7569eb600f8a9340cbc406e074ad135b4"} Dec 03 09:18:25 crc kubenswrapper[4576]: I1203 09:18:25.677697 4576 scope.go:117] "RemoveContainer" containerID="6a946ea24cd0ff2991d3760b68d68a973a0657937c42a4e51fa62809d15fc324" Dec 03 09:18:25 crc kubenswrapper[4576]: E1203 09:18:25.677997 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:18:26 crc kubenswrapper[4576]: I1203 09:18:26.861155 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-86chp" Dec 03 09:18:26 crc kubenswrapper[4576]: I1203 09:18:26.973749 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fd4aa481-7064-4ebc-bc06-d706d427260d-ssh-key\") pod \"fd4aa481-7064-4ebc-bc06-d706d427260d\" (UID: \"fd4aa481-7064-4ebc-bc06-d706d427260d\") " Dec 03 09:18:26 crc kubenswrapper[4576]: I1203 09:18:26.973876 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k7txm\" (UniqueName: \"kubernetes.io/projected/fd4aa481-7064-4ebc-bc06-d706d427260d-kube-api-access-k7txm\") pod \"fd4aa481-7064-4ebc-bc06-d706d427260d\" (UID: \"fd4aa481-7064-4ebc-bc06-d706d427260d\") " Dec 03 09:18:26 crc kubenswrapper[4576]: I1203 09:18:26.974040 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fd4aa481-7064-4ebc-bc06-d706d427260d-inventory\") pod \"fd4aa481-7064-4ebc-bc06-d706d427260d\" (UID: \"fd4aa481-7064-4ebc-bc06-d706d427260d\") " Dec 03 09:18:26 crc kubenswrapper[4576]: I1203 09:18:26.974082 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd4aa481-7064-4ebc-bc06-d706d427260d-repo-setup-combined-ca-bundle\") pod \"fd4aa481-7064-4ebc-bc06-d706d427260d\" (UID: \"fd4aa481-7064-4ebc-bc06-d706d427260d\") " Dec 03 09:18:26 crc kubenswrapper[4576]: I1203 09:18:26.979765 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd4aa481-7064-4ebc-bc06-d706d427260d-kube-api-access-k7txm" (OuterVolumeSpecName: "kube-api-access-k7txm") pod "fd4aa481-7064-4ebc-bc06-d706d427260d" (UID: "fd4aa481-7064-4ebc-bc06-d706d427260d"). InnerVolumeSpecName "kube-api-access-k7txm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:18:26 crc kubenswrapper[4576]: I1203 09:18:26.980671 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd4aa481-7064-4ebc-bc06-d706d427260d-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "fd4aa481-7064-4ebc-bc06-d706d427260d" (UID: "fd4aa481-7064-4ebc-bc06-d706d427260d"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:18:27 crc kubenswrapper[4576]: I1203 09:18:27.006954 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd4aa481-7064-4ebc-bc06-d706d427260d-inventory" (OuterVolumeSpecName: "inventory") pod "fd4aa481-7064-4ebc-bc06-d706d427260d" (UID: "fd4aa481-7064-4ebc-bc06-d706d427260d"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:18:27 crc kubenswrapper[4576]: I1203 09:18:27.024672 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd4aa481-7064-4ebc-bc06-d706d427260d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "fd4aa481-7064-4ebc-bc06-d706d427260d" (UID: "fd4aa481-7064-4ebc-bc06-d706d427260d"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:18:27 crc kubenswrapper[4576]: I1203 09:18:27.076334 4576 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fd4aa481-7064-4ebc-bc06-d706d427260d-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 09:18:27 crc kubenswrapper[4576]: I1203 09:18:27.076368 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k7txm\" (UniqueName: \"kubernetes.io/projected/fd4aa481-7064-4ebc-bc06-d706d427260d-kube-api-access-k7txm\") on node \"crc\" DevicePath \"\"" Dec 03 09:18:27 crc kubenswrapper[4576]: I1203 09:18:27.076408 4576 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fd4aa481-7064-4ebc-bc06-d706d427260d-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 09:18:27 crc kubenswrapper[4576]: I1203 09:18:27.076421 4576 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd4aa481-7064-4ebc-bc06-d706d427260d-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:18:27 crc kubenswrapper[4576]: I1203 09:18:27.419044 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-86chp" event={"ID":"fd4aa481-7064-4ebc-bc06-d706d427260d","Type":"ContainerDied","Data":"533c8b0583a3c1029ff3570d7438eef0555dca8ade9b90d4fff1ca4fabaa77a8"} Dec 03 09:18:27 crc kubenswrapper[4576]: I1203 09:18:27.419079 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="533c8b0583a3c1029ff3570d7438eef0555dca8ade9b90d4fff1ca4fabaa77a8" Dec 03 09:18:27 crc kubenswrapper[4576]: I1203 09:18:27.419152 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-86chp" Dec 03 09:18:27 crc kubenswrapper[4576]: I1203 09:18:27.545621 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-82clv"] Dec 03 09:18:27 crc kubenswrapper[4576]: E1203 09:18:27.548748 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd4aa481-7064-4ebc-bc06-d706d427260d" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 03 09:18:27 crc kubenswrapper[4576]: I1203 09:18:27.548951 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd4aa481-7064-4ebc-bc06-d706d427260d" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 03 09:18:27 crc kubenswrapper[4576]: I1203 09:18:27.549736 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd4aa481-7064-4ebc-bc06-d706d427260d" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 03 09:18:27 crc kubenswrapper[4576]: I1203 09:18:27.550921 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-82clv" Dec 03 09:18:27 crc kubenswrapper[4576]: I1203 09:18:27.564015 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 09:18:27 crc kubenswrapper[4576]: I1203 09:18:27.564193 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 09:18:27 crc kubenswrapper[4576]: I1203 09:18:27.564301 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-sl8dp" Dec 03 09:18:27 crc kubenswrapper[4576]: I1203 09:18:27.564573 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 09:18:27 crc kubenswrapper[4576]: I1203 09:18:27.584425 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-82clv"] Dec 03 09:18:27 crc kubenswrapper[4576]: I1203 09:18:27.688622 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3fc3717e-aa96-497b-8d90-3c247a234d88-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-82clv\" (UID: \"3fc3717e-aa96-497b-8d90-3c247a234d88\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-82clv" Dec 03 09:18:27 crc kubenswrapper[4576]: I1203 09:18:27.689096 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3fc3717e-aa96-497b-8d90-3c247a234d88-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-82clv\" (UID: \"3fc3717e-aa96-497b-8d90-3c247a234d88\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-82clv" Dec 03 09:18:27 crc kubenswrapper[4576]: I1203 09:18:27.689144 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8tn7\" (UniqueName: \"kubernetes.io/projected/3fc3717e-aa96-497b-8d90-3c247a234d88-kube-api-access-n8tn7\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-82clv\" (UID: \"3fc3717e-aa96-497b-8d90-3c247a234d88\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-82clv" Dec 03 09:18:27 crc kubenswrapper[4576]: I1203 09:18:27.791917 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3fc3717e-aa96-497b-8d90-3c247a234d88-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-82clv\" (UID: \"3fc3717e-aa96-497b-8d90-3c247a234d88\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-82clv" Dec 03 09:18:27 crc kubenswrapper[4576]: I1203 09:18:27.792012 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8tn7\" (UniqueName: \"kubernetes.io/projected/3fc3717e-aa96-497b-8d90-3c247a234d88-kube-api-access-n8tn7\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-82clv\" (UID: \"3fc3717e-aa96-497b-8d90-3c247a234d88\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-82clv" Dec 03 09:18:27 crc kubenswrapper[4576]: I1203 09:18:27.792105 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3fc3717e-aa96-497b-8d90-3c247a234d88-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-82clv\" (UID: \"3fc3717e-aa96-497b-8d90-3c247a234d88\") " 
pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-82clv" Dec 03 09:18:27 crc kubenswrapper[4576]: I1203 09:18:27.797882 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3fc3717e-aa96-497b-8d90-3c247a234d88-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-82clv\" (UID: \"3fc3717e-aa96-497b-8d90-3c247a234d88\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-82clv" Dec 03 09:18:27 crc kubenswrapper[4576]: I1203 09:18:27.808717 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3fc3717e-aa96-497b-8d90-3c247a234d88-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-82clv\" (UID: \"3fc3717e-aa96-497b-8d90-3c247a234d88\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-82clv" Dec 03 09:18:27 crc kubenswrapper[4576]: I1203 09:18:27.813310 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8tn7\" (UniqueName: \"kubernetes.io/projected/3fc3717e-aa96-497b-8d90-3c247a234d88-kube-api-access-n8tn7\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-82clv\" (UID: \"3fc3717e-aa96-497b-8d90-3c247a234d88\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-82clv" Dec 03 09:18:27 crc kubenswrapper[4576]: I1203 09:18:27.876791 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-82clv" Dec 03 09:18:28 crc kubenswrapper[4576]: I1203 09:18:28.407885 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-82clv"] Dec 03 09:18:28 crc kubenswrapper[4576]: I1203 09:18:28.440100 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-82clv" event={"ID":"3fc3717e-aa96-497b-8d90-3c247a234d88","Type":"ContainerStarted","Data":"dea0186c6fccd0eb01cba94e800bc5c0c6ceeb97e095d00ade918d48de2880da"} Dec 03 09:18:29 crc kubenswrapper[4576]: I1203 09:18:29.456305 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-82clv" event={"ID":"3fc3717e-aa96-497b-8d90-3c247a234d88","Type":"ContainerStarted","Data":"8b6cea4e25bb43550cedcdd09cae958836f8aac1dbc0485ed128bfb34f1a8e55"} Dec 03 09:18:30 crc kubenswrapper[4576]: I1203 09:18:30.067616 4576 scope.go:117] "RemoveContainer" containerID="179ae80f0419e58b0552719630ed2011085849e21e27b11bc75902ab600a7dd6" Dec 03 09:18:30 crc kubenswrapper[4576]: I1203 09:18:30.092419 4576 scope.go:117] "RemoveContainer" containerID="563ee6a4f03a3609f385466bb3a6548e1faf0a4259df8814f616a82c6971b46a" Dec 03 09:18:30 crc kubenswrapper[4576]: I1203 09:18:30.169827 4576 scope.go:117] "RemoveContainer" containerID="ec00d3b0d8f59cfb96ee204c863c49f8643d79ffdf4edeb79161f50f54360d0e" Dec 03 09:18:32 crc kubenswrapper[4576]: I1203 09:18:32.720265 4576 generic.go:334] "Generic (PLEG): container finished" podID="3fc3717e-aa96-497b-8d90-3c247a234d88" containerID="8b6cea4e25bb43550cedcdd09cae958836f8aac1dbc0485ed128bfb34f1a8e55" exitCode=0 Dec 03 09:18:32 crc kubenswrapper[4576]: I1203 09:18:32.720370 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-82clv" event={"ID":"3fc3717e-aa96-497b-8d90-3c247a234d88","Type":"ContainerDied","Data":"8b6cea4e25bb43550cedcdd09cae958836f8aac1dbc0485ed128bfb34f1a8e55"} Dec 03 09:18:34 crc kubenswrapper[4576]: I1203 
09:18:34.116086 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-82clv" Dec 03 09:18:34 crc kubenswrapper[4576]: I1203 09:18:34.196411 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3fc3717e-aa96-497b-8d90-3c247a234d88-ssh-key\") pod \"3fc3717e-aa96-497b-8d90-3c247a234d88\" (UID: \"3fc3717e-aa96-497b-8d90-3c247a234d88\") " Dec 03 09:18:34 crc kubenswrapper[4576]: I1203 09:18:34.196786 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n8tn7\" (UniqueName: \"kubernetes.io/projected/3fc3717e-aa96-497b-8d90-3c247a234d88-kube-api-access-n8tn7\") pod \"3fc3717e-aa96-497b-8d90-3c247a234d88\" (UID: \"3fc3717e-aa96-497b-8d90-3c247a234d88\") " Dec 03 09:18:34 crc kubenswrapper[4576]: I1203 09:18:34.196921 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3fc3717e-aa96-497b-8d90-3c247a234d88-inventory\") pod \"3fc3717e-aa96-497b-8d90-3c247a234d88\" (UID: \"3fc3717e-aa96-497b-8d90-3c247a234d88\") " Dec 03 09:18:34 crc kubenswrapper[4576]: I1203 09:18:34.202705 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3fc3717e-aa96-497b-8d90-3c247a234d88-kube-api-access-n8tn7" (OuterVolumeSpecName: "kube-api-access-n8tn7") pod "3fc3717e-aa96-497b-8d90-3c247a234d88" (UID: "3fc3717e-aa96-497b-8d90-3c247a234d88"). InnerVolumeSpecName "kube-api-access-n8tn7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:18:34 crc kubenswrapper[4576]: I1203 09:18:34.234506 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3fc3717e-aa96-497b-8d90-3c247a234d88-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3fc3717e-aa96-497b-8d90-3c247a234d88" (UID: "3fc3717e-aa96-497b-8d90-3c247a234d88"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:18:34 crc kubenswrapper[4576]: I1203 09:18:34.235421 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3fc3717e-aa96-497b-8d90-3c247a234d88-inventory" (OuterVolumeSpecName: "inventory") pod "3fc3717e-aa96-497b-8d90-3c247a234d88" (UID: "3fc3717e-aa96-497b-8d90-3c247a234d88"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:18:34 crc kubenswrapper[4576]: I1203 09:18:34.298977 4576 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3fc3717e-aa96-497b-8d90-3c247a234d88-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 09:18:34 crc kubenswrapper[4576]: I1203 09:18:34.299007 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n8tn7\" (UniqueName: \"kubernetes.io/projected/3fc3717e-aa96-497b-8d90-3c247a234d88-kube-api-access-n8tn7\") on node \"crc\" DevicePath \"\"" Dec 03 09:18:34 crc kubenswrapper[4576]: I1203 09:18:34.299018 4576 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3fc3717e-aa96-497b-8d90-3c247a234d88-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 09:18:34 crc kubenswrapper[4576]: I1203 09:18:34.741773 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-82clv" event={"ID":"3fc3717e-aa96-497b-8d90-3c247a234d88","Type":"ContainerDied","Data":"dea0186c6fccd0eb01cba94e800bc5c0c6ceeb97e095d00ade918d48de2880da"} Dec 03 09:18:34 crc kubenswrapper[4576]: I1203 09:18:34.741814 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dea0186c6fccd0eb01cba94e800bc5c0c6ceeb97e095d00ade918d48de2880da" Dec 03 09:18:34 crc kubenswrapper[4576]: I1203 09:18:34.741837 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-82clv" Dec 03 09:18:34 crc kubenswrapper[4576]: I1203 09:18:34.832646 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pd2lv"] Dec 03 09:18:34 crc kubenswrapper[4576]: E1203 09:18:34.833130 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fc3717e-aa96-497b-8d90-3c247a234d88" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 03 09:18:34 crc kubenswrapper[4576]: I1203 09:18:34.833155 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fc3717e-aa96-497b-8d90-3c247a234d88" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 03 09:18:34 crc kubenswrapper[4576]: I1203 09:18:34.833419 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fc3717e-aa96-497b-8d90-3c247a234d88" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 03 09:18:34 crc kubenswrapper[4576]: I1203 09:18:34.834190 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pd2lv" Dec 03 09:18:34 crc kubenswrapper[4576]: I1203 09:18:34.842948 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 09:18:34 crc kubenswrapper[4576]: I1203 09:18:34.843213 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 09:18:34 crc kubenswrapper[4576]: I1203 09:18:34.845095 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 09:18:34 crc kubenswrapper[4576]: I1203 09:18:34.849247 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-sl8dp" Dec 03 09:18:34 crc kubenswrapper[4576]: I1203 09:18:34.861021 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pd2lv"] Dec 03 09:18:34 crc kubenswrapper[4576]: I1203 09:18:34.910012 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1b39b7c-1acb-467a-904c-7ee77350804b-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-pd2lv\" (UID: \"a1b39b7c-1acb-467a-904c-7ee77350804b\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pd2lv" Dec 03 09:18:34 crc kubenswrapper[4576]: I1203 09:18:34.910157 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a1b39b7c-1acb-467a-904c-7ee77350804b-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-pd2lv\" (UID: \"a1b39b7c-1acb-467a-904c-7ee77350804b\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pd2lv" Dec 03 09:18:34 crc kubenswrapper[4576]: I1203 09:18:34.910206 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mhr7t\" (UniqueName: \"kubernetes.io/projected/a1b39b7c-1acb-467a-904c-7ee77350804b-kube-api-access-mhr7t\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-pd2lv\" (UID: \"a1b39b7c-1acb-467a-904c-7ee77350804b\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pd2lv" Dec 03 09:18:34 crc kubenswrapper[4576]: I1203 09:18:34.910251 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a1b39b7c-1acb-467a-904c-7ee77350804b-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-pd2lv\" (UID: \"a1b39b7c-1acb-467a-904c-7ee77350804b\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pd2lv" Dec 03 09:18:35 crc kubenswrapper[4576]: I1203 09:18:35.012082 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1b39b7c-1acb-467a-904c-7ee77350804b-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-pd2lv\" (UID: \"a1b39b7c-1acb-467a-904c-7ee77350804b\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pd2lv" Dec 03 09:18:35 crc kubenswrapper[4576]: I1203 09:18:35.012263 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a1b39b7c-1acb-467a-904c-7ee77350804b-inventory\") pod 
\"bootstrap-edpm-deployment-openstack-edpm-ipam-pd2lv\" (UID: \"a1b39b7c-1acb-467a-904c-7ee77350804b\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pd2lv" Dec 03 09:18:35 crc kubenswrapper[4576]: I1203 09:18:35.012303 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mhr7t\" (UniqueName: \"kubernetes.io/projected/a1b39b7c-1acb-467a-904c-7ee77350804b-kube-api-access-mhr7t\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-pd2lv\" (UID: \"a1b39b7c-1acb-467a-904c-7ee77350804b\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pd2lv" Dec 03 09:18:35 crc kubenswrapper[4576]: I1203 09:18:35.012361 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a1b39b7c-1acb-467a-904c-7ee77350804b-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-pd2lv\" (UID: \"a1b39b7c-1acb-467a-904c-7ee77350804b\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pd2lv" Dec 03 09:18:35 crc kubenswrapper[4576]: I1203 09:18:35.017933 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1b39b7c-1acb-467a-904c-7ee77350804b-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-pd2lv\" (UID: \"a1b39b7c-1acb-467a-904c-7ee77350804b\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pd2lv" Dec 03 09:18:35 crc kubenswrapper[4576]: I1203 09:18:35.020062 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a1b39b7c-1acb-467a-904c-7ee77350804b-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-pd2lv\" (UID: \"a1b39b7c-1acb-467a-904c-7ee77350804b\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pd2lv" Dec 03 09:18:35 crc kubenswrapper[4576]: I1203 09:18:35.026159 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a1b39b7c-1acb-467a-904c-7ee77350804b-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-pd2lv\" (UID: \"a1b39b7c-1acb-467a-904c-7ee77350804b\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pd2lv" Dec 03 09:18:35 crc kubenswrapper[4576]: I1203 09:18:35.033859 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mhr7t\" (UniqueName: \"kubernetes.io/projected/a1b39b7c-1acb-467a-904c-7ee77350804b-kube-api-access-mhr7t\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-pd2lv\" (UID: \"a1b39b7c-1acb-467a-904c-7ee77350804b\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pd2lv" Dec 03 09:18:35 crc kubenswrapper[4576]: I1203 09:18:35.160127 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pd2lv" Dec 03 09:18:35 crc kubenswrapper[4576]: I1203 09:18:35.736598 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pd2lv"] Dec 03 09:18:35 crc kubenswrapper[4576]: I1203 09:18:35.752018 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pd2lv" event={"ID":"a1b39b7c-1acb-467a-904c-7ee77350804b","Type":"ContainerStarted","Data":"b1949cd552b6c40b1821e4d1b935764ee3c1460eb0e71273dffe7880563e4567"} Dec 03 09:18:36 crc kubenswrapper[4576]: I1203 09:18:36.677122 4576 scope.go:117] "RemoveContainer" containerID="6a946ea24cd0ff2991d3760b68d68a973a0657937c42a4e51fa62809d15fc324" Dec 03 09:18:36 crc kubenswrapper[4576]: E1203 09:18:36.678129 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:18:36 crc kubenswrapper[4576]: I1203 09:18:36.776733 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pd2lv" event={"ID":"a1b39b7c-1acb-467a-904c-7ee77350804b","Type":"ContainerStarted","Data":"26c9ab3450b073ae54393d95508b76c068e1a1e40ebbe874ae728177e88433dd"} Dec 03 09:18:36 crc kubenswrapper[4576]: I1203 09:18:36.803828 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pd2lv" podStartSLOduration=2.654338806 podStartE2EDuration="2.803793048s" podCreationTimestamp="2025-12-03 09:18:34 +0000 UTC" firstStartedPulling="2025-12-03 09:18:35.730211999 +0000 UTC m=+2323.116188983" lastFinishedPulling="2025-12-03 09:18:35.879666251 +0000 UTC m=+2323.265643225" observedRunningTime="2025-12-03 09:18:36.79498647 +0000 UTC m=+2324.180963454" watchObservedRunningTime="2025-12-03 09:18:36.803793048 +0000 UTC m=+2324.189770032" Dec 03 09:18:47 crc kubenswrapper[4576]: I1203 09:18:47.677391 4576 scope.go:117] "RemoveContainer" containerID="6a946ea24cd0ff2991d3760b68d68a973a0657937c42a4e51fa62809d15fc324" Dec 03 09:18:47 crc kubenswrapper[4576]: E1203 09:18:47.678312 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:18:59 crc kubenswrapper[4576]: I1203 09:18:59.678845 4576 scope.go:117] "RemoveContainer" containerID="6a946ea24cd0ff2991d3760b68d68a973a0657937c42a4e51fa62809d15fc324" Dec 03 09:18:59 crc kubenswrapper[4576]: E1203 09:18:59.679938 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:19:13 crc kubenswrapper[4576]: I1203 09:19:13.677143 4576 scope.go:117] "RemoveContainer" containerID="6a946ea24cd0ff2991d3760b68d68a973a0657937c42a4e51fa62809d15fc324" Dec 03 09:19:13 crc kubenswrapper[4576]: E1203 09:19:13.678277 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:19:26 crc kubenswrapper[4576]: I1203 09:19:26.678597 4576 scope.go:117] "RemoveContainer" containerID="6a946ea24cd0ff2991d3760b68d68a973a0657937c42a4e51fa62809d15fc324" Dec 03 09:19:26 crc kubenswrapper[4576]: E1203 09:19:26.680300 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:19:30 crc kubenswrapper[4576]: I1203 09:19:30.242271 4576 scope.go:117] "RemoveContainer" containerID="b4b98a8d6c628e682631dfcc383328641fe0d3cc133f9eeb49129680a5ce7511" Dec 03 09:19:30 crc kubenswrapper[4576]: I1203 09:19:30.271250 4576 scope.go:117] "RemoveContainer" containerID="80c57d3c0ad3fbb0e6afa522538c05ba7514a3151fa069b74598f692e556f6fb" Dec 03 09:19:30 crc kubenswrapper[4576]: I1203 09:19:30.294017 4576 scope.go:117] "RemoveContainer" containerID="ec6fb260ace858ff3252caf06df438bd629d9b661dc002528047350d813cb3f2" Dec 03 09:19:30 crc kubenswrapper[4576]: I1203 09:19:30.319377 4576 scope.go:117] "RemoveContainer" containerID="911097f0a9e8cada88ce210c8b6e3adebca482981d855dfbb11e27d113f3b59e" Dec 03 09:19:30 crc kubenswrapper[4576]: I1203 09:19:30.342901 4576 scope.go:117] "RemoveContainer" containerID="84fad1d87e8198d81331d0ccad98dbeb679c5449a353d8c8fe08dbe809622c04" Dec 03 09:19:30 crc kubenswrapper[4576]: I1203 09:19:30.365672 4576 scope.go:117] "RemoveContainer" containerID="993d7d5ae0d126496d669c638716a16f232631f0a0b7e46c5aed880460e30082" Dec 03 09:19:37 crc kubenswrapper[4576]: I1203 09:19:37.677434 4576 scope.go:117] "RemoveContainer" containerID="6a946ea24cd0ff2991d3760b68d68a973a0657937c42a4e51fa62809d15fc324" Dec 03 09:19:37 crc kubenswrapper[4576]: E1203 09:19:37.678073 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:19:52 crc kubenswrapper[4576]: I1203 09:19:52.678098 4576 scope.go:117] "RemoveContainer" containerID="6a946ea24cd0ff2991d3760b68d68a973a0657937c42a4e51fa62809d15fc324" Dec 03 09:19:52 crc kubenswrapper[4576]: E1203 09:19:52.678972 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:20:07 crc kubenswrapper[4576]: I1203 09:20:07.677290 4576 scope.go:117] "RemoveContainer" containerID="6a946ea24cd0ff2991d3760b68d68a973a0657937c42a4e51fa62809d15fc324" Dec 03 09:20:07 crc kubenswrapper[4576]: E1203 09:20:07.678092 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:20:21 crc kubenswrapper[4576]: I1203 09:20:21.678332 4576 scope.go:117] "RemoveContainer" containerID="6a946ea24cd0ff2991d3760b68d68a973a0657937c42a4e51fa62809d15fc324" Dec 03 09:20:21 crc kubenswrapper[4576]: E1203 09:20:21.679371 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:20:33 crc kubenswrapper[4576]: I1203 09:20:33.072251 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-tjp4r"] Dec 03 09:20:33 crc kubenswrapper[4576]: I1203 09:20:33.088679 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-rvmn2"] Dec 03 09:20:33 crc kubenswrapper[4576]: I1203 09:20:33.100047 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-1fd2-account-create-update-td6rc"] Dec 03 09:20:33 crc kubenswrapper[4576]: I1203 09:20:33.109639 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-tjp4r"] Dec 03 09:20:33 crc kubenswrapper[4576]: I1203 09:20:33.118890 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-1fd2-account-create-update-td6rc"] Dec 03 09:20:33 crc kubenswrapper[4576]: I1203 09:20:33.127223 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-899d-account-create-update-p4njf"] Dec 03 09:20:33 crc kubenswrapper[4576]: I1203 09:20:33.136554 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-rvmn2"] Dec 03 09:20:33 crc kubenswrapper[4576]: I1203 09:20:33.145095 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-899d-account-create-update-p4njf"] Dec 03 09:20:33 crc kubenswrapper[4576]: I1203 09:20:33.692194 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="02edecfe-6126-4456-bedd-6ce9a3f68ac7" path="/var/lib/kubelet/pods/02edecfe-6126-4456-bedd-6ce9a3f68ac7/volumes" Dec 03 09:20:33 crc kubenswrapper[4576]: I1203 09:20:33.693645 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3bd81d26-be9f-433b-8ec3-12afd1d85eeb" path="/var/lib/kubelet/pods/3bd81d26-be9f-433b-8ec3-12afd1d85eeb/volumes" Dec 03 09:20:33 crc 
kubenswrapper[4576]: I1203 09:20:33.694616 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae79285c-f19f-4d6c-9f02-fe5b6d1de88b" path="/var/lib/kubelet/pods/ae79285c-f19f-4d6c-9f02-fe5b6d1de88b/volumes" Dec 03 09:20:33 crc kubenswrapper[4576]: I1203 09:20:33.695464 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9131a32-0712-419c-bf9e-3d3f6b9fa4cc" path="/var/lib/kubelet/pods/d9131a32-0712-419c-bf9e-3d3f6b9fa4cc/volumes" Dec 03 09:20:34 crc kubenswrapper[4576]: I1203 09:20:34.677589 4576 scope.go:117] "RemoveContainer" containerID="6a946ea24cd0ff2991d3760b68d68a973a0657937c42a4e51fa62809d15fc324" Dec 03 09:20:34 crc kubenswrapper[4576]: E1203 09:20:34.678244 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:20:40 crc kubenswrapper[4576]: I1203 09:20:40.028333 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-l5979"] Dec 03 09:20:40 crc kubenswrapper[4576]: I1203 09:20:40.040590 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-l5979"] Dec 03 09:20:41 crc kubenswrapper[4576]: I1203 09:20:41.052691 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-2f64-account-create-update-22w48"] Dec 03 09:20:41 crc kubenswrapper[4576]: I1203 09:20:41.065792 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-2f64-account-create-update-22w48"] Dec 03 09:20:41 crc kubenswrapper[4576]: I1203 09:20:41.689894 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30211fb8-a5e9-4d09-ab11-ca3e35b5ba92" path="/var/lib/kubelet/pods/30211fb8-a5e9-4d09-ab11-ca3e35b5ba92/volumes" Dec 03 09:20:41 crc kubenswrapper[4576]: I1203 09:20:41.690598 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cbed30d2-7863-4bab-9d8f-56a7b3650b47" path="/var/lib/kubelet/pods/cbed30d2-7863-4bab-9d8f-56a7b3650b47/volumes" Dec 03 09:20:46 crc kubenswrapper[4576]: I1203 09:20:46.677514 4576 scope.go:117] "RemoveContainer" containerID="6a946ea24cd0ff2991d3760b68d68a973a0657937c42a4e51fa62809d15fc324" Dec 03 09:20:46 crc kubenswrapper[4576]: E1203 09:20:46.678635 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:20:52 crc kubenswrapper[4576]: I1203 09:20:52.539754 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-flf7g"] Dec 03 09:20:52 crc kubenswrapper[4576]: I1203 09:20:52.543160 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-flf7g" Dec 03 09:20:52 crc kubenswrapper[4576]: I1203 09:20:52.552422 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-flf7g"] Dec 03 09:20:52 crc kubenswrapper[4576]: I1203 09:20:52.721347 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/460ecbc4-a5e2-453f-82da-7ba3d090da0b-utilities\") pod \"community-operators-flf7g\" (UID: \"460ecbc4-a5e2-453f-82da-7ba3d090da0b\") " pod="openshift-marketplace/community-operators-flf7g" Dec 03 09:20:52 crc kubenswrapper[4576]: I1203 09:20:52.721583 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jc9h7\" (UniqueName: \"kubernetes.io/projected/460ecbc4-a5e2-453f-82da-7ba3d090da0b-kube-api-access-jc9h7\") pod \"community-operators-flf7g\" (UID: \"460ecbc4-a5e2-453f-82da-7ba3d090da0b\") " pod="openshift-marketplace/community-operators-flf7g" Dec 03 09:20:52 crc kubenswrapper[4576]: I1203 09:20:52.721638 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/460ecbc4-a5e2-453f-82da-7ba3d090da0b-catalog-content\") pod \"community-operators-flf7g\" (UID: \"460ecbc4-a5e2-453f-82da-7ba3d090da0b\") " pod="openshift-marketplace/community-operators-flf7g" Dec 03 09:20:52 crc kubenswrapper[4576]: I1203 09:20:52.823322 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/460ecbc4-a5e2-453f-82da-7ba3d090da0b-utilities\") pod \"community-operators-flf7g\" (UID: \"460ecbc4-a5e2-453f-82da-7ba3d090da0b\") " pod="openshift-marketplace/community-operators-flf7g" Dec 03 09:20:52 crc kubenswrapper[4576]: I1203 09:20:52.823483 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jc9h7\" (UniqueName: \"kubernetes.io/projected/460ecbc4-a5e2-453f-82da-7ba3d090da0b-kube-api-access-jc9h7\") pod \"community-operators-flf7g\" (UID: \"460ecbc4-a5e2-453f-82da-7ba3d090da0b\") " pod="openshift-marketplace/community-operators-flf7g" Dec 03 09:20:52 crc kubenswrapper[4576]: I1203 09:20:52.823516 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/460ecbc4-a5e2-453f-82da-7ba3d090da0b-catalog-content\") pod \"community-operators-flf7g\" (UID: \"460ecbc4-a5e2-453f-82da-7ba3d090da0b\") " pod="openshift-marketplace/community-operators-flf7g" Dec 03 09:20:52 crc kubenswrapper[4576]: I1203 09:20:52.824088 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/460ecbc4-a5e2-453f-82da-7ba3d090da0b-utilities\") pod \"community-operators-flf7g\" (UID: \"460ecbc4-a5e2-453f-82da-7ba3d090da0b\") " pod="openshift-marketplace/community-operators-flf7g" Dec 03 09:20:52 crc kubenswrapper[4576]: I1203 09:20:52.824150 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/460ecbc4-a5e2-453f-82da-7ba3d090da0b-catalog-content\") pod \"community-operators-flf7g\" (UID: \"460ecbc4-a5e2-453f-82da-7ba3d090da0b\") " pod="openshift-marketplace/community-operators-flf7g" Dec 03 09:20:52 crc kubenswrapper[4576]: I1203 09:20:52.844390 4576 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-jc9h7\" (UniqueName: \"kubernetes.io/projected/460ecbc4-a5e2-453f-82da-7ba3d090da0b-kube-api-access-jc9h7\") pod \"community-operators-flf7g\" (UID: \"460ecbc4-a5e2-453f-82da-7ba3d090da0b\") " pod="openshift-marketplace/community-operators-flf7g" Dec 03 09:20:52 crc kubenswrapper[4576]: I1203 09:20:52.878092 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-flf7g" Dec 03 09:20:53 crc kubenswrapper[4576]: I1203 09:20:53.341731 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-flf7g"] Dec 03 09:20:54 crc kubenswrapper[4576]: I1203 09:20:54.196578 4576 generic.go:334] "Generic (PLEG): container finished" podID="460ecbc4-a5e2-453f-82da-7ba3d090da0b" containerID="d2e5239a407d0766ec499eb40cc30952442c8e5aabcf24d799f687a7282a672b" exitCode=0 Dec 03 09:20:54 crc kubenswrapper[4576]: I1203 09:20:54.196924 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-flf7g" event={"ID":"460ecbc4-a5e2-453f-82da-7ba3d090da0b","Type":"ContainerDied","Data":"d2e5239a407d0766ec499eb40cc30952442c8e5aabcf24d799f687a7282a672b"} Dec 03 09:20:54 crc kubenswrapper[4576]: I1203 09:20:54.196950 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-flf7g" event={"ID":"460ecbc4-a5e2-453f-82da-7ba3d090da0b","Type":"ContainerStarted","Data":"c7ad482bd43fae08de0444931ef7a24a3867c53719344efb7a2dd4ba3815edb3"} Dec 03 09:20:54 crc kubenswrapper[4576]: I1203 09:20:54.199381 4576 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 09:20:56 crc kubenswrapper[4576]: I1203 09:20:56.217028 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-flf7g" event={"ID":"460ecbc4-a5e2-453f-82da-7ba3d090da0b","Type":"ContainerStarted","Data":"894893bca3fd37c97083d1d289bf9b87ea2973b7615932300274f743a9fc85f0"} Dec 03 09:20:58 crc kubenswrapper[4576]: I1203 09:20:58.234003 4576 generic.go:334] "Generic (PLEG): container finished" podID="460ecbc4-a5e2-453f-82da-7ba3d090da0b" containerID="894893bca3fd37c97083d1d289bf9b87ea2973b7615932300274f743a9fc85f0" exitCode=0 Dec 03 09:20:58 crc kubenswrapper[4576]: I1203 09:20:58.234054 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-flf7g" event={"ID":"460ecbc4-a5e2-453f-82da-7ba3d090da0b","Type":"ContainerDied","Data":"894893bca3fd37c97083d1d289bf9b87ea2973b7615932300274f743a9fc85f0"} Dec 03 09:20:59 crc kubenswrapper[4576]: I1203 09:20:59.280664 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-flf7g" event={"ID":"460ecbc4-a5e2-453f-82da-7ba3d090da0b","Type":"ContainerStarted","Data":"b58c4d6934eb9dcd24783832e8dd7e392b0d7116b252655f1be4ca1fde35c7c4"} Dec 03 09:20:59 crc kubenswrapper[4576]: I1203 09:20:59.304236 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-flf7g" podStartSLOduration=2.862145018 podStartE2EDuration="7.304217982s" podCreationTimestamp="2025-12-03 09:20:52 +0000 UTC" firstStartedPulling="2025-12-03 09:20:54.199140986 +0000 UTC m=+2461.585117970" lastFinishedPulling="2025-12-03 09:20:58.64121395 +0000 UTC m=+2466.027190934" observedRunningTime="2025-12-03 09:20:59.304200421 +0000 UTC m=+2466.690177405" watchObservedRunningTime="2025-12-03 
09:20:59.304217982 +0000 UTC m=+2466.690194966" Dec 03 09:21:01 crc kubenswrapper[4576]: I1203 09:21:01.142165 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-tf824"] Dec 03 09:21:01 crc kubenswrapper[4576]: I1203 09:21:01.144627 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tf824" Dec 03 09:21:01 crc kubenswrapper[4576]: I1203 09:21:01.170365 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tf824"] Dec 03 09:21:01 crc kubenswrapper[4576]: I1203 09:21:01.188428 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2621f474-2b9e-4be4-ac20-a794cc2a3920-catalog-content\") pod \"redhat-operators-tf824\" (UID: \"2621f474-2b9e-4be4-ac20-a794cc2a3920\") " pod="openshift-marketplace/redhat-operators-tf824" Dec 03 09:21:01 crc kubenswrapper[4576]: I1203 09:21:01.188477 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cxr6d\" (UniqueName: \"kubernetes.io/projected/2621f474-2b9e-4be4-ac20-a794cc2a3920-kube-api-access-cxr6d\") pod \"redhat-operators-tf824\" (UID: \"2621f474-2b9e-4be4-ac20-a794cc2a3920\") " pod="openshift-marketplace/redhat-operators-tf824" Dec 03 09:21:01 crc kubenswrapper[4576]: I1203 09:21:01.188584 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2621f474-2b9e-4be4-ac20-a794cc2a3920-utilities\") pod \"redhat-operators-tf824\" (UID: \"2621f474-2b9e-4be4-ac20-a794cc2a3920\") " pod="openshift-marketplace/redhat-operators-tf824" Dec 03 09:21:01 crc kubenswrapper[4576]: I1203 09:21:01.290014 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2621f474-2b9e-4be4-ac20-a794cc2a3920-catalog-content\") pod \"redhat-operators-tf824\" (UID: \"2621f474-2b9e-4be4-ac20-a794cc2a3920\") " pod="openshift-marketplace/redhat-operators-tf824" Dec 03 09:21:01 crc kubenswrapper[4576]: I1203 09:21:01.290061 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cxr6d\" (UniqueName: \"kubernetes.io/projected/2621f474-2b9e-4be4-ac20-a794cc2a3920-kube-api-access-cxr6d\") pod \"redhat-operators-tf824\" (UID: \"2621f474-2b9e-4be4-ac20-a794cc2a3920\") " pod="openshift-marketplace/redhat-operators-tf824" Dec 03 09:21:01 crc kubenswrapper[4576]: I1203 09:21:01.290128 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2621f474-2b9e-4be4-ac20-a794cc2a3920-utilities\") pod \"redhat-operators-tf824\" (UID: \"2621f474-2b9e-4be4-ac20-a794cc2a3920\") " pod="openshift-marketplace/redhat-operators-tf824" Dec 03 09:21:01 crc kubenswrapper[4576]: I1203 09:21:01.290839 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2621f474-2b9e-4be4-ac20-a794cc2a3920-utilities\") pod \"redhat-operators-tf824\" (UID: \"2621f474-2b9e-4be4-ac20-a794cc2a3920\") " pod="openshift-marketplace/redhat-operators-tf824" Dec 03 09:21:01 crc kubenswrapper[4576]: I1203 09:21:01.290925 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/2621f474-2b9e-4be4-ac20-a794cc2a3920-catalog-content\") pod \"redhat-operators-tf824\" (UID: \"2621f474-2b9e-4be4-ac20-a794cc2a3920\") " pod="openshift-marketplace/redhat-operators-tf824" Dec 03 09:21:01 crc kubenswrapper[4576]: I1203 09:21:01.323646 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cxr6d\" (UniqueName: \"kubernetes.io/projected/2621f474-2b9e-4be4-ac20-a794cc2a3920-kube-api-access-cxr6d\") pod \"redhat-operators-tf824\" (UID: \"2621f474-2b9e-4be4-ac20-a794cc2a3920\") " pod="openshift-marketplace/redhat-operators-tf824" Dec 03 09:21:01 crc kubenswrapper[4576]: I1203 09:21:01.465263 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tf824" Dec 03 09:21:01 crc kubenswrapper[4576]: I1203 09:21:01.678554 4576 scope.go:117] "RemoveContainer" containerID="6a946ea24cd0ff2991d3760b68d68a973a0657937c42a4e51fa62809d15fc324" Dec 03 09:21:01 crc kubenswrapper[4576]: E1203 09:21:01.679250 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:21:02 crc kubenswrapper[4576]: I1203 09:21:02.020139 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tf824"] Dec 03 09:21:02 crc kubenswrapper[4576]: I1203 09:21:02.308251 4576 generic.go:334] "Generic (PLEG): container finished" podID="2621f474-2b9e-4be4-ac20-a794cc2a3920" containerID="66e182640dc4bc838987be10a2724b5e002cb4a1bfa5d3b1a2466494aad81ab0" exitCode=0 Dec 03 09:21:02 crc kubenswrapper[4576]: I1203 09:21:02.308457 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tf824" event={"ID":"2621f474-2b9e-4be4-ac20-a794cc2a3920","Type":"ContainerDied","Data":"66e182640dc4bc838987be10a2724b5e002cb4a1bfa5d3b1a2466494aad81ab0"} Dec 03 09:21:02 crc kubenswrapper[4576]: I1203 09:21:02.308519 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tf824" event={"ID":"2621f474-2b9e-4be4-ac20-a794cc2a3920","Type":"ContainerStarted","Data":"bb545e54e35cf97a258160b92d70fc4a75de40c2a060eb5e5f6d19e1b59176b5"} Dec 03 09:21:02 crc kubenswrapper[4576]: I1203 09:21:02.878888 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-flf7g" Dec 03 09:21:02 crc kubenswrapper[4576]: I1203 09:21:02.879174 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-flf7g" Dec 03 09:21:02 crc kubenswrapper[4576]: I1203 09:21:02.932520 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-flf7g" Dec 03 09:21:03 crc kubenswrapper[4576]: I1203 09:21:03.370395 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-flf7g" Dec 03 09:21:04 crc kubenswrapper[4576]: I1203 09:21:04.336304 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tf824" 
event={"ID":"2621f474-2b9e-4be4-ac20-a794cc2a3920","Type":"ContainerStarted","Data":"5c7c2e8b6c9947765b382beaa3bf31b8bf1fa2b0127beecc46646dd6e4f53062"} Dec 03 09:21:05 crc kubenswrapper[4576]: I1203 09:21:05.318038 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-flf7g"] Dec 03 09:21:06 crc kubenswrapper[4576]: I1203 09:21:06.052926 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-8763-account-create-update-q2496"] Dec 03 09:21:06 crc kubenswrapper[4576]: I1203 09:21:06.068046 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-8763-account-create-update-q2496"] Dec 03 09:21:06 crc kubenswrapper[4576]: I1203 09:21:06.351246 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-flf7g" podUID="460ecbc4-a5e2-453f-82da-7ba3d090da0b" containerName="registry-server" containerID="cri-o://b58c4d6934eb9dcd24783832e8dd7e392b0d7116b252655f1be4ca1fde35c7c4" gracePeriod=2 Dec 03 09:21:07 crc kubenswrapper[4576]: I1203 09:21:07.380476 4576 generic.go:334] "Generic (PLEG): container finished" podID="460ecbc4-a5e2-453f-82da-7ba3d090da0b" containerID="b58c4d6934eb9dcd24783832e8dd7e392b0d7116b252655f1be4ca1fde35c7c4" exitCode=0 Dec 03 09:21:07 crc kubenswrapper[4576]: I1203 09:21:07.380550 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-flf7g" event={"ID":"460ecbc4-a5e2-453f-82da-7ba3d090da0b","Type":"ContainerDied","Data":"b58c4d6934eb9dcd24783832e8dd7e392b0d7116b252655f1be4ca1fde35c7c4"} Dec 03 09:21:07 crc kubenswrapper[4576]: I1203 09:21:07.628135 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-flf7g" Dec 03 09:21:07 crc kubenswrapper[4576]: I1203 09:21:07.693750 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce003078-de4d-4722-9e0d-3d2570c008cc" path="/var/lib/kubelet/pods/ce003078-de4d-4722-9e0d-3d2570c008cc/volumes" Dec 03 09:21:07 crc kubenswrapper[4576]: I1203 09:21:07.778837 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/460ecbc4-a5e2-453f-82da-7ba3d090da0b-catalog-content\") pod \"460ecbc4-a5e2-453f-82da-7ba3d090da0b\" (UID: \"460ecbc4-a5e2-453f-82da-7ba3d090da0b\") " Dec 03 09:21:07 crc kubenswrapper[4576]: I1203 09:21:07.778964 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jc9h7\" (UniqueName: \"kubernetes.io/projected/460ecbc4-a5e2-453f-82da-7ba3d090da0b-kube-api-access-jc9h7\") pod \"460ecbc4-a5e2-453f-82da-7ba3d090da0b\" (UID: \"460ecbc4-a5e2-453f-82da-7ba3d090da0b\") " Dec 03 09:21:07 crc kubenswrapper[4576]: I1203 09:21:07.779143 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/460ecbc4-a5e2-453f-82da-7ba3d090da0b-utilities\") pod \"460ecbc4-a5e2-453f-82da-7ba3d090da0b\" (UID: \"460ecbc4-a5e2-453f-82da-7ba3d090da0b\") " Dec 03 09:21:07 crc kubenswrapper[4576]: I1203 09:21:07.780513 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/460ecbc4-a5e2-453f-82da-7ba3d090da0b-utilities" (OuterVolumeSpecName: "utilities") pod "460ecbc4-a5e2-453f-82da-7ba3d090da0b" (UID: "460ecbc4-a5e2-453f-82da-7ba3d090da0b"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:21:07 crc kubenswrapper[4576]: I1203 09:21:07.781574 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/460ecbc4-a5e2-453f-82da-7ba3d090da0b-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 09:21:07 crc kubenswrapper[4576]: I1203 09:21:07.822415 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/460ecbc4-a5e2-453f-82da-7ba3d090da0b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "460ecbc4-a5e2-453f-82da-7ba3d090da0b" (UID: "460ecbc4-a5e2-453f-82da-7ba3d090da0b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:21:07 crc kubenswrapper[4576]: I1203 09:21:07.883111 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/460ecbc4-a5e2-453f-82da-7ba3d090da0b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 09:21:07 crc kubenswrapper[4576]: I1203 09:21:07.969420 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/460ecbc4-a5e2-453f-82da-7ba3d090da0b-kube-api-access-jc9h7" (OuterVolumeSpecName: "kube-api-access-jc9h7") pod "460ecbc4-a5e2-453f-82da-7ba3d090da0b" (UID: "460ecbc4-a5e2-453f-82da-7ba3d090da0b"). InnerVolumeSpecName "kube-api-access-jc9h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:21:07 crc kubenswrapper[4576]: I1203 09:21:07.985539 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jc9h7\" (UniqueName: \"kubernetes.io/projected/460ecbc4-a5e2-453f-82da-7ba3d090da0b-kube-api-access-jc9h7\") on node \"crc\" DevicePath \"\"" Dec 03 09:21:08 crc kubenswrapper[4576]: I1203 09:21:08.391258 4576 generic.go:334] "Generic (PLEG): container finished" podID="2621f474-2b9e-4be4-ac20-a794cc2a3920" containerID="5c7c2e8b6c9947765b382beaa3bf31b8bf1fa2b0127beecc46646dd6e4f53062" exitCode=0 Dec 03 09:21:08 crc kubenswrapper[4576]: I1203 09:21:08.391324 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tf824" event={"ID":"2621f474-2b9e-4be4-ac20-a794cc2a3920","Type":"ContainerDied","Data":"5c7c2e8b6c9947765b382beaa3bf31b8bf1fa2b0127beecc46646dd6e4f53062"} Dec 03 09:21:08 crc kubenswrapper[4576]: I1203 09:21:08.418237 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-flf7g" event={"ID":"460ecbc4-a5e2-453f-82da-7ba3d090da0b","Type":"ContainerDied","Data":"c7ad482bd43fae08de0444931ef7a24a3867c53719344efb7a2dd4ba3815edb3"} Dec 03 09:21:08 crc kubenswrapper[4576]: I1203 09:21:08.418323 4576 scope.go:117] "RemoveContainer" containerID="b58c4d6934eb9dcd24783832e8dd7e392b0d7116b252655f1be4ca1fde35c7c4" Dec 03 09:21:08 crc kubenswrapper[4576]: I1203 09:21:08.418377 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-flf7g" Dec 03 09:21:08 crc kubenswrapper[4576]: I1203 09:21:08.453269 4576 scope.go:117] "RemoveContainer" containerID="894893bca3fd37c97083d1d289bf9b87ea2973b7615932300274f743a9fc85f0" Dec 03 09:21:08 crc kubenswrapper[4576]: I1203 09:21:08.476198 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-flf7g"] Dec 03 09:21:08 crc kubenswrapper[4576]: I1203 09:21:08.484751 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-flf7g"] Dec 03 09:21:08 crc kubenswrapper[4576]: I1203 09:21:08.507480 4576 scope.go:117] "RemoveContainer" containerID="d2e5239a407d0766ec499eb40cc30952442c8e5aabcf24d799f687a7282a672b" Dec 03 09:21:09 crc kubenswrapper[4576]: I1203 09:21:09.047479 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-vwggs"] Dec 03 09:21:09 crc kubenswrapper[4576]: I1203 09:21:09.057487 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-8s9cm"] Dec 03 09:21:09 crc kubenswrapper[4576]: I1203 09:21:09.066604 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-cf28-account-create-update-4g5wv"] Dec 03 09:21:09 crc kubenswrapper[4576]: I1203 09:21:09.074084 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-gmdx8"] Dec 03 09:21:09 crc kubenswrapper[4576]: I1203 09:21:09.085278 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-vwggs"] Dec 03 09:21:09 crc kubenswrapper[4576]: I1203 09:21:09.094128 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-gmdx8"] Dec 03 09:21:09 crc kubenswrapper[4576]: I1203 09:21:09.103201 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-917b-account-create-update-5fjkf"] Dec 03 09:21:09 crc kubenswrapper[4576]: I1203 09:21:09.112880 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-917b-account-create-update-5fjkf"] Dec 03 09:21:09 crc kubenswrapper[4576]: I1203 09:21:09.121323 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-cf28-account-create-update-4g5wv"] Dec 03 09:21:09 crc kubenswrapper[4576]: I1203 09:21:09.130322 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-8s9cm"] Dec 03 09:21:09 crc kubenswrapper[4576]: I1203 09:21:09.688438 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0fe42276-9012-4675-8196-28962d473856" path="/var/lib/kubelet/pods/0fe42276-9012-4675-8196-28962d473856/volumes" Dec 03 09:21:09 crc kubenswrapper[4576]: I1203 09:21:09.689382 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="460ecbc4-a5e2-453f-82da-7ba3d090da0b" path="/var/lib/kubelet/pods/460ecbc4-a5e2-453f-82da-7ba3d090da0b/volumes" Dec 03 09:21:09 crc kubenswrapper[4576]: I1203 09:21:09.690057 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54df14dc-89cd-4940-aed5-0ba757f5294c" path="/var/lib/kubelet/pods/54df14dc-89cd-4940-aed5-0ba757f5294c/volumes" Dec 03 09:21:09 crc kubenswrapper[4576]: I1203 09:21:09.691654 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8d27ef6-3306-410d-96ee-0648446c759f" path="/var/lib/kubelet/pods/a8d27ef6-3306-410d-96ee-0648446c759f/volumes" Dec 03 09:21:09 crc kubenswrapper[4576]: I1203 09:21:09.692246 4576 kubelet_volumes.go:163] "Cleaned up 
orphaned pod volumes dir" podUID="bc770216-f893-46e0-8e36-1c60a1094e82" path="/var/lib/kubelet/pods/bc770216-f893-46e0-8e36-1c60a1094e82/volumes" Dec 03 09:21:09 crc kubenswrapper[4576]: I1203 09:21:09.692904 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ef9dad13-73f8-4391-b2e8-3a043cff74c0" path="/var/lib/kubelet/pods/ef9dad13-73f8-4391-b2e8-3a043cff74c0/volumes" Dec 03 09:21:10 crc kubenswrapper[4576]: I1203 09:21:10.439669 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tf824" event={"ID":"2621f474-2b9e-4be4-ac20-a794cc2a3920","Type":"ContainerStarted","Data":"b9392c891a5d28ccde506deda700426aad80bc9f34d40b326219146b4bbe5e07"} Dec 03 09:21:10 crc kubenswrapper[4576]: I1203 09:21:10.481083 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-tf824" podStartSLOduration=2.201359094 podStartE2EDuration="9.481062438s" podCreationTimestamp="2025-12-03 09:21:01 +0000 UTC" firstStartedPulling="2025-12-03 09:21:02.309881787 +0000 UTC m=+2469.695858771" lastFinishedPulling="2025-12-03 09:21:09.589585131 +0000 UTC m=+2476.975562115" observedRunningTime="2025-12-03 09:21:10.476292989 +0000 UTC m=+2477.862269973" watchObservedRunningTime="2025-12-03 09:21:10.481062438 +0000 UTC m=+2477.867039422" Dec 03 09:21:11 crc kubenswrapper[4576]: I1203 09:21:11.466174 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-tf824" Dec 03 09:21:11 crc kubenswrapper[4576]: I1203 09:21:11.466754 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-tf824" Dec 03 09:21:12 crc kubenswrapper[4576]: I1203 09:21:12.526312 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-tf824" podUID="2621f474-2b9e-4be4-ac20-a794cc2a3920" containerName="registry-server" probeResult="failure" output=< Dec 03 09:21:12 crc kubenswrapper[4576]: timeout: failed to connect service ":50051" within 1s Dec 03 09:21:12 crc kubenswrapper[4576]: > Dec 03 09:21:15 crc kubenswrapper[4576]: I1203 09:21:15.678708 4576 scope.go:117] "RemoveContainer" containerID="6a946ea24cd0ff2991d3760b68d68a973a0657937c42a4e51fa62809d15fc324" Dec 03 09:21:15 crc kubenswrapper[4576]: E1203 09:21:15.678938 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:21:21 crc kubenswrapper[4576]: I1203 09:21:21.515414 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-tf824" Dec 03 09:21:21 crc kubenswrapper[4576]: I1203 09:21:21.581363 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-tf824" Dec 03 09:21:21 crc kubenswrapper[4576]: I1203 09:21:21.769229 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tf824"] Dec 03 09:21:22 crc kubenswrapper[4576]: I1203 09:21:22.555059 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-tf824" 
podUID="2621f474-2b9e-4be4-ac20-a794cc2a3920" containerName="registry-server" containerID="cri-o://b9392c891a5d28ccde506deda700426aad80bc9f34d40b326219146b4bbe5e07" gracePeriod=2 Dec 03 09:21:23 crc kubenswrapper[4576]: I1203 09:21:23.577135 4576 generic.go:334] "Generic (PLEG): container finished" podID="2621f474-2b9e-4be4-ac20-a794cc2a3920" containerID="b9392c891a5d28ccde506deda700426aad80bc9f34d40b326219146b4bbe5e07" exitCode=0 Dec 03 09:21:23 crc kubenswrapper[4576]: I1203 09:21:23.577195 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tf824" event={"ID":"2621f474-2b9e-4be4-ac20-a794cc2a3920","Type":"ContainerDied","Data":"b9392c891a5d28ccde506deda700426aad80bc9f34d40b326219146b4bbe5e07"} Dec 03 09:21:23 crc kubenswrapper[4576]: I1203 09:21:23.813088 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tf824" Dec 03 09:21:23 crc kubenswrapper[4576]: I1203 09:21:23.885477 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2621f474-2b9e-4be4-ac20-a794cc2a3920-utilities\") pod \"2621f474-2b9e-4be4-ac20-a794cc2a3920\" (UID: \"2621f474-2b9e-4be4-ac20-a794cc2a3920\") " Dec 03 09:21:23 crc kubenswrapper[4576]: I1203 09:21:23.885610 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2621f474-2b9e-4be4-ac20-a794cc2a3920-catalog-content\") pod \"2621f474-2b9e-4be4-ac20-a794cc2a3920\" (UID: \"2621f474-2b9e-4be4-ac20-a794cc2a3920\") " Dec 03 09:21:23 crc kubenswrapper[4576]: I1203 09:21:23.885633 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cxr6d\" (UniqueName: \"kubernetes.io/projected/2621f474-2b9e-4be4-ac20-a794cc2a3920-kube-api-access-cxr6d\") pod \"2621f474-2b9e-4be4-ac20-a794cc2a3920\" (UID: \"2621f474-2b9e-4be4-ac20-a794cc2a3920\") " Dec 03 09:21:23 crc kubenswrapper[4576]: I1203 09:21:23.886141 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2621f474-2b9e-4be4-ac20-a794cc2a3920-utilities" (OuterVolumeSpecName: "utilities") pod "2621f474-2b9e-4be4-ac20-a794cc2a3920" (UID: "2621f474-2b9e-4be4-ac20-a794cc2a3920"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:21:23 crc kubenswrapper[4576]: I1203 09:21:23.894728 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2621f474-2b9e-4be4-ac20-a794cc2a3920-kube-api-access-cxr6d" (OuterVolumeSpecName: "kube-api-access-cxr6d") pod "2621f474-2b9e-4be4-ac20-a794cc2a3920" (UID: "2621f474-2b9e-4be4-ac20-a794cc2a3920"). InnerVolumeSpecName "kube-api-access-cxr6d". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:21:23 crc kubenswrapper[4576]: I1203 09:21:23.986572 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cxr6d\" (UniqueName: \"kubernetes.io/projected/2621f474-2b9e-4be4-ac20-a794cc2a3920-kube-api-access-cxr6d\") on node \"crc\" DevicePath \"\"" Dec 03 09:21:23 crc kubenswrapper[4576]: I1203 09:21:23.986601 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2621f474-2b9e-4be4-ac20-a794cc2a3920-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 09:21:24 crc kubenswrapper[4576]: I1203 09:21:24.006841 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2621f474-2b9e-4be4-ac20-a794cc2a3920-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2621f474-2b9e-4be4-ac20-a794cc2a3920" (UID: "2621f474-2b9e-4be4-ac20-a794cc2a3920"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:21:24 crc kubenswrapper[4576]: I1203 09:21:24.088236 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2621f474-2b9e-4be4-ac20-a794cc2a3920-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 09:21:24 crc kubenswrapper[4576]: I1203 09:21:24.590213 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tf824" event={"ID":"2621f474-2b9e-4be4-ac20-a794cc2a3920","Type":"ContainerDied","Data":"bb545e54e35cf97a258160b92d70fc4a75de40c2a060eb5e5f6d19e1b59176b5"} Dec 03 09:21:24 crc kubenswrapper[4576]: I1203 09:21:24.590658 4576 scope.go:117] "RemoveContainer" containerID="b9392c891a5d28ccde506deda700426aad80bc9f34d40b326219146b4bbe5e07" Dec 03 09:21:24 crc kubenswrapper[4576]: I1203 09:21:24.590247 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tf824" Dec 03 09:21:24 crc kubenswrapper[4576]: I1203 09:21:24.637083 4576 scope.go:117] "RemoveContainer" containerID="5c7c2e8b6c9947765b382beaa3bf31b8bf1fa2b0127beecc46646dd6e4f53062" Dec 03 09:21:24 crc kubenswrapper[4576]: I1203 09:21:24.637283 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tf824"] Dec 03 09:21:24 crc kubenswrapper[4576]: I1203 09:21:24.648074 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-tf824"] Dec 03 09:21:24 crc kubenswrapper[4576]: I1203 09:21:24.660394 4576 scope.go:117] "RemoveContainer" containerID="66e182640dc4bc838987be10a2724b5e002cb4a1bfa5d3b1a2466494aad81ab0" Dec 03 09:21:25 crc kubenswrapper[4576]: I1203 09:21:25.707080 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2621f474-2b9e-4be4-ac20-a794cc2a3920" path="/var/lib/kubelet/pods/2621f474-2b9e-4be4-ac20-a794cc2a3920/volumes" Dec 03 09:21:29 crc kubenswrapper[4576]: I1203 09:21:29.677429 4576 scope.go:117] "RemoveContainer" containerID="6a946ea24cd0ff2991d3760b68d68a973a0657937c42a4e51fa62809d15fc324" Dec 03 09:21:29 crc kubenswrapper[4576]: E1203 09:21:29.677719 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:21:30 crc kubenswrapper[4576]: I1203 09:21:30.039492 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-xpx9m"] Dec 03 09:21:30 crc kubenswrapper[4576]: I1203 09:21:30.047423 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-xpx9m"] Dec 03 09:21:30 crc kubenswrapper[4576]: I1203 09:21:30.449543 4576 scope.go:117] "RemoveContainer" containerID="081af4196e1a1809140be15b136e39ea2e28ed36c4b6794ba5befae71b96d392" Dec 03 09:21:30 crc kubenswrapper[4576]: I1203 09:21:30.483550 4576 scope.go:117] "RemoveContainer" containerID="500a050716ad1d31ca83b98d720f66ad8f993bf146e0706c19465c307b57439d" Dec 03 09:21:30 crc kubenswrapper[4576]: I1203 09:21:30.531692 4576 scope.go:117] "RemoveContainer" containerID="b7fb0b79388d2b032c02ea9d3b5ba3bf2b6329c37f653c7f2ea45d59da08ba36" Dec 03 09:21:30 crc kubenswrapper[4576]: I1203 09:21:30.572618 4576 scope.go:117] "RemoveContainer" containerID="1f212dd4d2bee4f19bfb6b7ff536eb6d4141d5b26ba895a23414ff6104c6f4ef" Dec 03 09:21:30 crc kubenswrapper[4576]: I1203 09:21:30.623256 4576 scope.go:117] "RemoveContainer" containerID="67c805e03d4cdae5ff7e70e7aa7833be737d40fb4b9666116427425c319dc94c" Dec 03 09:21:30 crc kubenswrapper[4576]: I1203 09:21:30.676937 4576 scope.go:117] "RemoveContainer" containerID="657cd9f7ffccc31161afffbfea5a81a1f112d5becb3d275cc47c207def3aaa2d" Dec 03 09:21:30 crc kubenswrapper[4576]: I1203 09:21:30.715512 4576 scope.go:117] "RemoveContainer" containerID="d86147927e5661b419d74987e053f107d1833539ef35c68f7db3f0456f2fef9f" Dec 03 09:21:30 crc kubenswrapper[4576]: I1203 09:21:30.735900 4576 scope.go:117] "RemoveContainer" containerID="ffb52e41f74d7e9b5eaaef727bc32875c9aff05b43fe48166609403a91341fa8" Dec 03 09:21:30 crc kubenswrapper[4576]: I1203 09:21:30.759975 4576 scope.go:117] "RemoveContainer" 
containerID="f1caa22025532f0bd1b4f5cb091802b821fe7db9c3223c8c3761b48579ed77ba" Dec 03 09:21:30 crc kubenswrapper[4576]: I1203 09:21:30.783333 4576 scope.go:117] "RemoveContainer" containerID="dc07261f74649d362d3351fac984da289f993337fc38e99219bccd5eedea564e" Dec 03 09:21:30 crc kubenswrapper[4576]: I1203 09:21:30.803570 4576 scope.go:117] "RemoveContainer" containerID="f7eb83e5597f9f67358b7a9851c89a69975f992eec0384bc07aa412b0ab15a36" Dec 03 09:21:30 crc kubenswrapper[4576]: I1203 09:21:30.825600 4576 scope.go:117] "RemoveContainer" containerID="2b88a3596a300fdd6c4f6641b9ef789e1d5f6985788947d6f60437277f131594" Dec 03 09:21:31 crc kubenswrapper[4576]: I1203 09:21:31.714767 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9361513a-8494-48cb-9d68-43a57f2d679f" path="/var/lib/kubelet/pods/9361513a-8494-48cb-9d68-43a57f2d679f/volumes" Dec 03 09:21:40 crc kubenswrapper[4576]: I1203 09:21:40.677760 4576 scope.go:117] "RemoveContainer" containerID="6a946ea24cd0ff2991d3760b68d68a973a0657937c42a4e51fa62809d15fc324" Dec 03 09:21:40 crc kubenswrapper[4576]: E1203 09:21:40.678451 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:21:54 crc kubenswrapper[4576]: I1203 09:21:54.044577 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-9dp49"] Dec 03 09:21:54 crc kubenswrapper[4576]: I1203 09:21:54.057389 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-9dp49"] Dec 03 09:21:54 crc kubenswrapper[4576]: I1203 09:21:54.677178 4576 scope.go:117] "RemoveContainer" containerID="6a946ea24cd0ff2991d3760b68d68a973a0657937c42a4e51fa62809d15fc324" Dec 03 09:21:54 crc kubenswrapper[4576]: E1203 09:21:54.677421 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:21:55 crc kubenswrapper[4576]: I1203 09:21:55.690685 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9fd5058c-a47f-46cd-b7a7-d6d02014da6e" path="/var/lib/kubelet/pods/9fd5058c-a47f-46cd-b7a7-d6d02014da6e/volumes" Dec 03 09:22:04 crc kubenswrapper[4576]: I1203 09:22:04.986401 4576 generic.go:334] "Generic (PLEG): container finished" podID="a1b39b7c-1acb-467a-904c-7ee77350804b" containerID="26c9ab3450b073ae54393d95508b76c068e1a1e40ebbe874ae728177e88433dd" exitCode=0 Dec 03 09:22:04 crc kubenswrapper[4576]: I1203 09:22:04.987241 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pd2lv" event={"ID":"a1b39b7c-1acb-467a-904c-7ee77350804b","Type":"ContainerDied","Data":"26c9ab3450b073ae54393d95508b76c068e1a1e40ebbe874ae728177e88433dd"} Dec 03 09:22:06 crc kubenswrapper[4576]: I1203 09:22:06.429040 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pd2lv" Dec 03 09:22:06 crc kubenswrapper[4576]: I1203 09:22:06.617554 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mhr7t\" (UniqueName: \"kubernetes.io/projected/a1b39b7c-1acb-467a-904c-7ee77350804b-kube-api-access-mhr7t\") pod \"a1b39b7c-1acb-467a-904c-7ee77350804b\" (UID: \"a1b39b7c-1acb-467a-904c-7ee77350804b\") " Dec 03 09:22:06 crc kubenswrapper[4576]: I1203 09:22:06.617602 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a1b39b7c-1acb-467a-904c-7ee77350804b-ssh-key\") pod \"a1b39b7c-1acb-467a-904c-7ee77350804b\" (UID: \"a1b39b7c-1acb-467a-904c-7ee77350804b\") " Dec 03 09:22:06 crc kubenswrapper[4576]: I1203 09:22:06.617686 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a1b39b7c-1acb-467a-904c-7ee77350804b-inventory\") pod \"a1b39b7c-1acb-467a-904c-7ee77350804b\" (UID: \"a1b39b7c-1acb-467a-904c-7ee77350804b\") " Dec 03 09:22:06 crc kubenswrapper[4576]: I1203 09:22:06.617801 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1b39b7c-1acb-467a-904c-7ee77350804b-bootstrap-combined-ca-bundle\") pod \"a1b39b7c-1acb-467a-904c-7ee77350804b\" (UID: \"a1b39b7c-1acb-467a-904c-7ee77350804b\") " Dec 03 09:22:06 crc kubenswrapper[4576]: I1203 09:22:06.624933 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1b39b7c-1acb-467a-904c-7ee77350804b-kube-api-access-mhr7t" (OuterVolumeSpecName: "kube-api-access-mhr7t") pod "a1b39b7c-1acb-467a-904c-7ee77350804b" (UID: "a1b39b7c-1acb-467a-904c-7ee77350804b"). InnerVolumeSpecName "kube-api-access-mhr7t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:22:06 crc kubenswrapper[4576]: I1203 09:22:06.628656 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1b39b7c-1acb-467a-904c-7ee77350804b-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "a1b39b7c-1acb-467a-904c-7ee77350804b" (UID: "a1b39b7c-1acb-467a-904c-7ee77350804b"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:22:06 crc kubenswrapper[4576]: I1203 09:22:06.655137 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1b39b7c-1acb-467a-904c-7ee77350804b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a1b39b7c-1acb-467a-904c-7ee77350804b" (UID: "a1b39b7c-1acb-467a-904c-7ee77350804b"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:22:06 crc kubenswrapper[4576]: I1203 09:22:06.657412 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1b39b7c-1acb-467a-904c-7ee77350804b-inventory" (OuterVolumeSpecName: "inventory") pod "a1b39b7c-1acb-467a-904c-7ee77350804b" (UID: "a1b39b7c-1acb-467a-904c-7ee77350804b"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:22:06 crc kubenswrapper[4576]: I1203 09:22:06.677649 4576 scope.go:117] "RemoveContainer" containerID="6a946ea24cd0ff2991d3760b68d68a973a0657937c42a4e51fa62809d15fc324" Dec 03 09:22:06 crc kubenswrapper[4576]: E1203 09:22:06.677862 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:22:06 crc kubenswrapper[4576]: I1203 09:22:06.720811 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mhr7t\" (UniqueName: \"kubernetes.io/projected/a1b39b7c-1acb-467a-904c-7ee77350804b-kube-api-access-mhr7t\") on node \"crc\" DevicePath \"\"" Dec 03 09:22:06 crc kubenswrapper[4576]: I1203 09:22:06.721170 4576 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a1b39b7c-1acb-467a-904c-7ee77350804b-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 09:22:06 crc kubenswrapper[4576]: I1203 09:22:06.721218 4576 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a1b39b7c-1acb-467a-904c-7ee77350804b-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 09:22:06 crc kubenswrapper[4576]: I1203 09:22:06.721231 4576 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1b39b7c-1acb-467a-904c-7ee77350804b-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:22:07 crc kubenswrapper[4576]: I1203 09:22:07.061311 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pd2lv" event={"ID":"a1b39b7c-1acb-467a-904c-7ee77350804b","Type":"ContainerDied","Data":"b1949cd552b6c40b1821e4d1b935764ee3c1460eb0e71273dffe7880563e4567"} Dec 03 09:22:07 crc kubenswrapper[4576]: I1203 09:22:07.061367 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b1949cd552b6c40b1821e4d1b935764ee3c1460eb0e71273dffe7880563e4567" Dec 03 09:22:07 crc kubenswrapper[4576]: I1203 09:22:07.061474 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pd2lv" Dec 03 09:22:07 crc kubenswrapper[4576]: I1203 09:22:07.194339 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-hnxl7"] Dec 03 09:22:07 crc kubenswrapper[4576]: E1203 09:22:07.194876 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2621f474-2b9e-4be4-ac20-a794cc2a3920" containerName="registry-server" Dec 03 09:22:07 crc kubenswrapper[4576]: I1203 09:22:07.194901 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="2621f474-2b9e-4be4-ac20-a794cc2a3920" containerName="registry-server" Dec 03 09:22:07 crc kubenswrapper[4576]: E1203 09:22:07.194918 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="460ecbc4-a5e2-453f-82da-7ba3d090da0b" containerName="extract-content" Dec 03 09:22:07 crc kubenswrapper[4576]: I1203 09:22:07.194926 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="460ecbc4-a5e2-453f-82da-7ba3d090da0b" containerName="extract-content" Dec 03 09:22:07 crc kubenswrapper[4576]: E1203 09:22:07.194949 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="460ecbc4-a5e2-453f-82da-7ba3d090da0b" containerName="extract-utilities" Dec 03 09:22:07 crc kubenswrapper[4576]: I1203 09:22:07.194957 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="460ecbc4-a5e2-453f-82da-7ba3d090da0b" containerName="extract-utilities" Dec 03 09:22:07 crc kubenswrapper[4576]: E1203 09:22:07.194972 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2621f474-2b9e-4be4-ac20-a794cc2a3920" containerName="extract-content" Dec 03 09:22:07 crc kubenswrapper[4576]: I1203 09:22:07.194980 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="2621f474-2b9e-4be4-ac20-a794cc2a3920" containerName="extract-content" Dec 03 09:22:07 crc kubenswrapper[4576]: E1203 09:22:07.194995 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2621f474-2b9e-4be4-ac20-a794cc2a3920" containerName="extract-utilities" Dec 03 09:22:07 crc kubenswrapper[4576]: I1203 09:22:07.195003 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="2621f474-2b9e-4be4-ac20-a794cc2a3920" containerName="extract-utilities" Dec 03 09:22:07 crc kubenswrapper[4576]: E1203 09:22:07.195031 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1b39b7c-1acb-467a-904c-7ee77350804b" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 03 09:22:07 crc kubenswrapper[4576]: I1203 09:22:07.195040 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1b39b7c-1acb-467a-904c-7ee77350804b" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 03 09:22:07 crc kubenswrapper[4576]: E1203 09:22:07.195067 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="460ecbc4-a5e2-453f-82da-7ba3d090da0b" containerName="registry-server" Dec 03 09:22:07 crc kubenswrapper[4576]: I1203 09:22:07.195075 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="460ecbc4-a5e2-453f-82da-7ba3d090da0b" containerName="registry-server" Dec 03 09:22:07 crc kubenswrapper[4576]: I1203 09:22:07.195293 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1b39b7c-1acb-467a-904c-7ee77350804b" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 03 09:22:07 crc kubenswrapper[4576]: I1203 09:22:07.195324 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="2621f474-2b9e-4be4-ac20-a794cc2a3920" 
containerName="registry-server" Dec 03 09:22:07 crc kubenswrapper[4576]: I1203 09:22:07.195337 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="460ecbc4-a5e2-453f-82da-7ba3d090da0b" containerName="registry-server" Dec 03 09:22:07 crc kubenswrapper[4576]: I1203 09:22:07.196195 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-hnxl7" Dec 03 09:22:07 crc kubenswrapper[4576]: I1203 09:22:07.200795 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 09:22:07 crc kubenswrapper[4576]: I1203 09:22:07.200822 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-sl8dp" Dec 03 09:22:07 crc kubenswrapper[4576]: I1203 09:22:07.200898 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 09:22:07 crc kubenswrapper[4576]: I1203 09:22:07.201240 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 09:22:07 crc kubenswrapper[4576]: I1203 09:22:07.220086 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-hnxl7"] Dec 03 09:22:07 crc kubenswrapper[4576]: I1203 09:22:07.340451 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fc6a72d3-31b7-461d-82f3-09536b77f9e6-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-hnxl7\" (UID: \"fc6a72d3-31b7-461d-82f3-09536b77f9e6\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-hnxl7" Dec 03 09:22:07 crc kubenswrapper[4576]: I1203 09:22:07.340518 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fc6a72d3-31b7-461d-82f3-09536b77f9e6-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-hnxl7\" (UID: \"fc6a72d3-31b7-461d-82f3-09536b77f9e6\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-hnxl7" Dec 03 09:22:07 crc kubenswrapper[4576]: I1203 09:22:07.340604 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4gq2\" (UniqueName: \"kubernetes.io/projected/fc6a72d3-31b7-461d-82f3-09536b77f9e6-kube-api-access-s4gq2\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-hnxl7\" (UID: \"fc6a72d3-31b7-461d-82f3-09536b77f9e6\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-hnxl7" Dec 03 09:22:07 crc kubenswrapper[4576]: I1203 09:22:07.442666 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fc6a72d3-31b7-461d-82f3-09536b77f9e6-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-hnxl7\" (UID: \"fc6a72d3-31b7-461d-82f3-09536b77f9e6\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-hnxl7" Dec 03 09:22:07 crc kubenswrapper[4576]: I1203 09:22:07.442727 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4gq2\" (UniqueName: \"kubernetes.io/projected/fc6a72d3-31b7-461d-82f3-09536b77f9e6-kube-api-access-s4gq2\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-hnxl7\" (UID: \"fc6a72d3-31b7-461d-82f3-09536b77f9e6\") " 
pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-hnxl7" Dec 03 09:22:07 crc kubenswrapper[4576]: I1203 09:22:07.442900 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fc6a72d3-31b7-461d-82f3-09536b77f9e6-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-hnxl7\" (UID: \"fc6a72d3-31b7-461d-82f3-09536b77f9e6\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-hnxl7" Dec 03 09:22:07 crc kubenswrapper[4576]: I1203 09:22:07.446547 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fc6a72d3-31b7-461d-82f3-09536b77f9e6-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-hnxl7\" (UID: \"fc6a72d3-31b7-461d-82f3-09536b77f9e6\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-hnxl7" Dec 03 09:22:07 crc kubenswrapper[4576]: I1203 09:22:07.450412 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fc6a72d3-31b7-461d-82f3-09536b77f9e6-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-hnxl7\" (UID: \"fc6a72d3-31b7-461d-82f3-09536b77f9e6\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-hnxl7" Dec 03 09:22:07 crc kubenswrapper[4576]: I1203 09:22:07.461061 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s4gq2\" (UniqueName: \"kubernetes.io/projected/fc6a72d3-31b7-461d-82f3-09536b77f9e6-kube-api-access-s4gq2\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-hnxl7\" (UID: \"fc6a72d3-31b7-461d-82f3-09536b77f9e6\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-hnxl7" Dec 03 09:22:07 crc kubenswrapper[4576]: I1203 09:22:07.514815 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-hnxl7" Dec 03 09:22:08 crc kubenswrapper[4576]: I1203 09:22:08.051332 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-hnxl7"] Dec 03 09:22:08 crc kubenswrapper[4576]: I1203 09:22:08.076806 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-hnxl7" event={"ID":"fc6a72d3-31b7-461d-82f3-09536b77f9e6","Type":"ContainerStarted","Data":"5f2b95d5f6d315757105d254037b1decb484725d115170e7a2d9ca673811481e"} Dec 03 09:22:09 crc kubenswrapper[4576]: I1203 09:22:09.089876 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-hnxl7" event={"ID":"fc6a72d3-31b7-461d-82f3-09536b77f9e6","Type":"ContainerStarted","Data":"00dbfd68a348bee4a875ec057e27c1b50ae0d9a4ac04d2bfe6dcf4c809ddaf04"} Dec 03 09:22:09 crc kubenswrapper[4576]: I1203 09:22:09.109464 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-hnxl7" podStartSLOduration=1.926279028 podStartE2EDuration="2.109445966s" podCreationTimestamp="2025-12-03 09:22:07 +0000 UTC" firstStartedPulling="2025-12-03 09:22:08.060173729 +0000 UTC m=+2535.446150703" lastFinishedPulling="2025-12-03 09:22:08.243340657 +0000 UTC m=+2535.629317641" observedRunningTime="2025-12-03 09:22:09.105263392 +0000 UTC m=+2536.491240376" watchObservedRunningTime="2025-12-03 09:22:09.109445966 +0000 UTC m=+2536.495422950" Dec 03 09:22:17 crc kubenswrapper[4576]: I1203 09:22:17.677131 4576 scope.go:117] "RemoveContainer" containerID="6a946ea24cd0ff2991d3760b68d68a973a0657937c42a4e51fa62809d15fc324" Dec 03 09:22:17 crc kubenswrapper[4576]: E1203 09:22:17.677883 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:22:30 crc kubenswrapper[4576]: I1203 09:22:30.677177 4576 scope.go:117] "RemoveContainer" containerID="6a946ea24cd0ff2991d3760b68d68a973a0657937c42a4e51fa62809d15fc324" Dec 03 09:22:30 crc kubenswrapper[4576]: E1203 09:22:30.677878 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:22:31 crc kubenswrapper[4576]: I1203 09:22:31.774426 4576 scope.go:117] "RemoveContainer" containerID="21b7c84bfa993c1020261ed4d65627e18ca78a8bacf0338a593c23d5e573528f" Dec 03 09:22:31 crc kubenswrapper[4576]: I1203 09:22:31.835844 4576 scope.go:117] "RemoveContainer" containerID="d884e6620ba50c7b8e22a8c30fd33c77f64687e765c97885ee49fb58d9ec5a2f" Dec 03 09:22:43 crc kubenswrapper[4576]: I1203 09:22:43.705110 4576 scope.go:117] "RemoveContainer" containerID="6a946ea24cd0ff2991d3760b68d68a973a0657937c42a4e51fa62809d15fc324" Dec 03 09:22:43 crc kubenswrapper[4576]: 
E1203 09:22:43.710165 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693"
Dec 03 09:22:44 crc kubenswrapper[4576]: I1203 09:22:44.041758 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-w66xd"]
Dec 03 09:22:44 crc kubenswrapper[4576]: I1203 09:22:44.052372 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-w66xd"]
Dec 03 09:22:45 crc kubenswrapper[4576]: I1203 09:22:45.034777 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-2njh4"]
Dec 03 09:22:45 crc kubenswrapper[4576]: I1203 09:22:45.050381 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-2njh4"]
Dec 03 09:22:45 crc kubenswrapper[4576]: I1203 09:22:45.692274 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07" path="/var/lib/kubelet/pods/bdf20d6b-f4e4-4c12-bb68-1ea7ed759a07/volumes"
Dec 03 09:22:45 crc kubenswrapper[4576]: I1203 09:22:45.694229 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cdef871a-e0bf-42eb-b9d5-bcf0777fbec4" path="/var/lib/kubelet/pods/cdef871a-e0bf-42eb-b9d5-bcf0777fbec4/volumes"
Dec 03 09:22:51 crc kubenswrapper[4576]: I1203 09:22:51.043799 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-qwbz9"]
Dec 03 09:22:51 crc kubenswrapper[4576]: I1203 09:22:51.052887 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-qwbz9"]
Dec 03 09:22:51 crc kubenswrapper[4576]: I1203 09:22:51.689205 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a1b7c2fb-e839-4698-8319-3f8eae6e46d6" path="/var/lib/kubelet/pods/a1b7c2fb-e839-4698-8319-3f8eae6e46d6/volumes"
Dec 03 09:22:58 crc kubenswrapper[4576]: I1203 09:22:58.677669 4576 scope.go:117] "RemoveContainer" containerID="6a946ea24cd0ff2991d3760b68d68a973a0657937c42a4e51fa62809d15fc324"
Dec 03 09:22:58 crc kubenswrapper[4576]: E1203 09:22:58.678504 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693"
Dec 03 09:23:00 crc kubenswrapper[4576]: I1203 09:23:00.039699 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-988np"]
Dec 03 09:23:00 crc kubenswrapper[4576]: I1203 09:23:00.048913 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-988np"]
Dec 03 09:23:01 crc kubenswrapper[4576]: I1203 09:23:01.080290 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-qqg9g"]
Dec 03 09:23:01 crc kubenswrapper[4576]: I1203 09:23:01.091741 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-qqg9g"]
Dec 03 09:23:01 crc kubenswrapper[4576]: I1203 09:23:01.687368 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="047a7c95-c4e3-46fa-8b1b-2a351992493e" path="/var/lib/kubelet/pods/047a7c95-c4e3-46fa-8b1b-2a351992493e/volumes"
Dec 03 09:23:01 crc kubenswrapper[4576]: I1203 09:23:01.688593 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="437286d3-1147-43ef-945f-8612d1610427" path="/var/lib/kubelet/pods/437286d3-1147-43ef-945f-8612d1610427/volumes"
Dec 03 09:23:13 crc kubenswrapper[4576]: I1203 09:23:13.688156 4576 scope.go:117] "RemoveContainer" containerID="6a946ea24cd0ff2991d3760b68d68a973a0657937c42a4e51fa62809d15fc324"
Dec 03 09:23:14 crc kubenswrapper[4576]: I1203 09:23:14.686425 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" event={"ID":"60b1bede-26e9-4b5d-b450-9866da685693","Type":"ContainerStarted","Data":"beab5b73426a8a2ed784ee9435f3e0edb3d47b3882d27e186bb2abf808fa00f3"}
Dec 03 09:23:31 crc kubenswrapper[4576]: I1203 09:23:31.921663 4576 scope.go:117] "RemoveContainer" containerID="1c9aba1e8d982524aae075138b9deb785c987072ab1406b17dab3ab5d8338280"
Dec 03 09:23:31 crc kubenswrapper[4576]: I1203 09:23:31.972882 4576 scope.go:117] "RemoveContainer" containerID="3da15af9b8e6b4bca051ee591606a6e45ab32d9dcae3bf0a80c6ecd25215d4ed"
Dec 03 09:23:32 crc kubenswrapper[4576]: I1203 09:23:32.006815 4576 scope.go:117] "RemoveContainer" containerID="46008041e9328f5c70ea5d61480b89615e431713d233332f8517865e11ec2e38"
Dec 03 09:23:32 crc kubenswrapper[4576]: I1203 09:23:32.053869 4576 scope.go:117] "RemoveContainer" containerID="d1d90449a4995f5ce23629369d06b9e4e6817aa4c307f6795c657b448f4718da"
Dec 03 09:23:32 crc kubenswrapper[4576]: I1203 09:23:32.123282 4576 scope.go:117] "RemoveContainer" containerID="763ab8a21ac3fe408c84d9db83465d682f64fd4ed25ba98019c21823b4135d4a"
Dec 03 09:24:00 crc kubenswrapper[4576]: I1203 09:24:00.057177 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-8hs7j"]
Dec 03 09:24:00 crc kubenswrapper[4576]: I1203 09:24:00.077404 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-ddfc-account-create-update-55b6r"]
Dec 03 09:24:00 crc kubenswrapper[4576]: I1203 09:24:00.088415 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-5ba3-account-create-update-hkhpv"]
Dec 03 09:24:00 crc kubenswrapper[4576]: I1203 09:24:00.096965 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-p9nqp"]
Dec 03 09:24:00 crc kubenswrapper[4576]: I1203 09:24:00.103559 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-8hs7j"]
Dec 03 09:24:00 crc kubenswrapper[4576]: I1203 09:24:00.110089 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-5ba3-account-create-update-hkhpv"]
Dec 03 09:24:00 crc kubenswrapper[4576]: I1203 09:24:00.116220 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-p9nqp"]
Dec 03 09:24:00 crc kubenswrapper[4576]: I1203 09:24:00.122728 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-s4j5r"]
Dec 03 09:24:00 crc kubenswrapper[4576]: I1203 09:24:00.136916 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-1cc4-account-create-update-ltg7z"]
Dec 03 09:24:00 crc kubenswrapper[4576]: I1203 09:24:00.144291 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-ddfc-account-create-update-55b6r"]
Dec 03 09:24:00 crc kubenswrapper[4576]: 
I1203 09:24:00.152627 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-s4j5r"] Dec 03 09:24:00 crc kubenswrapper[4576]: I1203 09:24:00.159746 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-1cc4-account-create-update-ltg7z"] Dec 03 09:24:01 crc kubenswrapper[4576]: I1203 09:24:01.692575 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="50764171-616e-4be8-b380-a5923d51773a" path="/var/lib/kubelet/pods/50764171-616e-4be8-b380-a5923d51773a/volumes" Dec 03 09:24:01 crc kubenswrapper[4576]: I1203 09:24:01.693410 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66f7791e-f5ad-4d75-b250-d584031bc805" path="/var/lib/kubelet/pods/66f7791e-f5ad-4d75-b250-d584031bc805/volumes" Dec 03 09:24:01 crc kubenswrapper[4576]: I1203 09:24:01.694810 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9878d95a-72e1-456b-aee3-f0bf43432249" path="/var/lib/kubelet/pods/9878d95a-72e1-456b-aee3-f0bf43432249/volumes" Dec 03 09:24:01 crc kubenswrapper[4576]: I1203 09:24:01.695951 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af864d88-4394-47c6-883a-85d47c2a6832" path="/var/lib/kubelet/pods/af864d88-4394-47c6-883a-85d47c2a6832/volumes" Dec 03 09:24:01 crc kubenswrapper[4576]: I1203 09:24:01.697283 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c224894c-4c8f-488a-a01e-69e2f721f329" path="/var/lib/kubelet/pods/c224894c-4c8f-488a-a01e-69e2f721f329/volumes" Dec 03 09:24:01 crc kubenswrapper[4576]: I1203 09:24:01.698052 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f8ce720b-ea32-4d75-b6f7-7adf32a6ed6a" path="/var/lib/kubelet/pods/f8ce720b-ea32-4d75-b6f7-7adf32a6ed6a/volumes" Dec 03 09:24:15 crc kubenswrapper[4576]: I1203 09:24:15.349485 4576 generic.go:334] "Generic (PLEG): container finished" podID="fc6a72d3-31b7-461d-82f3-09536b77f9e6" containerID="00dbfd68a348bee4a875ec057e27c1b50ae0d9a4ac04d2bfe6dcf4c809ddaf04" exitCode=0 Dec 03 09:24:15 crc kubenswrapper[4576]: I1203 09:24:15.349563 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-hnxl7" event={"ID":"fc6a72d3-31b7-461d-82f3-09536b77f9e6","Type":"ContainerDied","Data":"00dbfd68a348bee4a875ec057e27c1b50ae0d9a4ac04d2bfe6dcf4c809ddaf04"} Dec 03 09:24:16 crc kubenswrapper[4576]: I1203 09:24:16.828831 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-hnxl7" Dec 03 09:24:16 crc kubenswrapper[4576]: I1203 09:24:16.905619 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fc6a72d3-31b7-461d-82f3-09536b77f9e6-ssh-key\") pod \"fc6a72d3-31b7-461d-82f3-09536b77f9e6\" (UID: \"fc6a72d3-31b7-461d-82f3-09536b77f9e6\") " Dec 03 09:24:16 crc kubenswrapper[4576]: I1203 09:24:16.905743 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fc6a72d3-31b7-461d-82f3-09536b77f9e6-inventory\") pod \"fc6a72d3-31b7-461d-82f3-09536b77f9e6\" (UID: \"fc6a72d3-31b7-461d-82f3-09536b77f9e6\") " Dec 03 09:24:16 crc kubenswrapper[4576]: I1203 09:24:16.905788 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4gq2\" (UniqueName: \"kubernetes.io/projected/fc6a72d3-31b7-461d-82f3-09536b77f9e6-kube-api-access-s4gq2\") pod \"fc6a72d3-31b7-461d-82f3-09536b77f9e6\" (UID: \"fc6a72d3-31b7-461d-82f3-09536b77f9e6\") " Dec 03 09:24:16 crc kubenswrapper[4576]: I1203 09:24:16.911626 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc6a72d3-31b7-461d-82f3-09536b77f9e6-kube-api-access-s4gq2" (OuterVolumeSpecName: "kube-api-access-s4gq2") pod "fc6a72d3-31b7-461d-82f3-09536b77f9e6" (UID: "fc6a72d3-31b7-461d-82f3-09536b77f9e6"). InnerVolumeSpecName "kube-api-access-s4gq2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:24:16 crc kubenswrapper[4576]: I1203 09:24:16.945256 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc6a72d3-31b7-461d-82f3-09536b77f9e6-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "fc6a72d3-31b7-461d-82f3-09536b77f9e6" (UID: "fc6a72d3-31b7-461d-82f3-09536b77f9e6"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:24:16 crc kubenswrapper[4576]: I1203 09:24:16.954782 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc6a72d3-31b7-461d-82f3-09536b77f9e6-inventory" (OuterVolumeSpecName: "inventory") pod "fc6a72d3-31b7-461d-82f3-09536b77f9e6" (UID: "fc6a72d3-31b7-461d-82f3-09536b77f9e6"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:24:17 crc kubenswrapper[4576]: I1203 09:24:17.009827 4576 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fc6a72d3-31b7-461d-82f3-09536b77f9e6-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 09:24:17 crc kubenswrapper[4576]: I1203 09:24:17.009857 4576 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fc6a72d3-31b7-461d-82f3-09536b77f9e6-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 09:24:17 crc kubenswrapper[4576]: I1203 09:24:17.009878 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4gq2\" (UniqueName: \"kubernetes.io/projected/fc6a72d3-31b7-461d-82f3-09536b77f9e6-kube-api-access-s4gq2\") on node \"crc\" DevicePath \"\"" Dec 03 09:24:17 crc kubenswrapper[4576]: I1203 09:24:17.375584 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-hnxl7" event={"ID":"fc6a72d3-31b7-461d-82f3-09536b77f9e6","Type":"ContainerDied","Data":"5f2b95d5f6d315757105d254037b1decb484725d115170e7a2d9ca673811481e"} Dec 03 09:24:17 crc kubenswrapper[4576]: I1203 09:24:17.375631 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-hnxl7" Dec 03 09:24:17 crc kubenswrapper[4576]: I1203 09:24:17.375649 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5f2b95d5f6d315757105d254037b1decb484725d115170e7a2d9ca673811481e" Dec 03 09:24:17 crc kubenswrapper[4576]: I1203 09:24:17.641905 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-sdlvm"] Dec 03 09:24:17 crc kubenswrapper[4576]: E1203 09:24:17.642622 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc6a72d3-31b7-461d-82f3-09536b77f9e6" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 03 09:24:17 crc kubenswrapper[4576]: I1203 09:24:17.642642 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc6a72d3-31b7-461d-82f3-09536b77f9e6" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 03 09:24:17 crc kubenswrapper[4576]: I1203 09:24:17.642837 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc6a72d3-31b7-461d-82f3-09536b77f9e6" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 03 09:24:17 crc kubenswrapper[4576]: I1203 09:24:17.643454 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-sdlvm" Dec 03 09:24:17 crc kubenswrapper[4576]: I1203 09:24:17.645480 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 09:24:17 crc kubenswrapper[4576]: I1203 09:24:17.645873 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-sl8dp" Dec 03 09:24:17 crc kubenswrapper[4576]: I1203 09:24:17.647075 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 09:24:17 crc kubenswrapper[4576]: I1203 09:24:17.647315 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 09:24:17 crc kubenswrapper[4576]: I1203 09:24:17.673163 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-sdlvm"] Dec 03 09:24:17 crc kubenswrapper[4576]: I1203 09:24:17.743061 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/308d4747-e427-4848-8961-a21d39dbd449-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-sdlvm\" (UID: \"308d4747-e427-4848-8961-a21d39dbd449\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-sdlvm" Dec 03 09:24:17 crc kubenswrapper[4576]: I1203 09:24:17.743192 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/308d4747-e427-4848-8961-a21d39dbd449-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-sdlvm\" (UID: \"308d4747-e427-4848-8961-a21d39dbd449\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-sdlvm" Dec 03 09:24:17 crc kubenswrapper[4576]: I1203 09:24:17.743222 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cs6rx\" (UniqueName: \"kubernetes.io/projected/308d4747-e427-4848-8961-a21d39dbd449-kube-api-access-cs6rx\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-sdlvm\" (UID: \"308d4747-e427-4848-8961-a21d39dbd449\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-sdlvm" Dec 03 09:24:17 crc kubenswrapper[4576]: I1203 09:24:17.844870 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/308d4747-e427-4848-8961-a21d39dbd449-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-sdlvm\" (UID: \"308d4747-e427-4848-8961-a21d39dbd449\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-sdlvm" Dec 03 09:24:17 crc kubenswrapper[4576]: I1203 09:24:17.844927 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cs6rx\" (UniqueName: \"kubernetes.io/projected/308d4747-e427-4848-8961-a21d39dbd449-kube-api-access-cs6rx\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-sdlvm\" (UID: \"308d4747-e427-4848-8961-a21d39dbd449\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-sdlvm" Dec 03 09:24:17 crc kubenswrapper[4576]: I1203 09:24:17.845033 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/308d4747-e427-4848-8961-a21d39dbd449-inventory\") 
pod \"configure-network-edpm-deployment-openstack-edpm-ipam-sdlvm\" (UID: \"308d4747-e427-4848-8961-a21d39dbd449\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-sdlvm" Dec 03 09:24:17 crc kubenswrapper[4576]: I1203 09:24:17.848604 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/308d4747-e427-4848-8961-a21d39dbd449-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-sdlvm\" (UID: \"308d4747-e427-4848-8961-a21d39dbd449\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-sdlvm" Dec 03 09:24:17 crc kubenswrapper[4576]: I1203 09:24:17.863287 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/308d4747-e427-4848-8961-a21d39dbd449-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-sdlvm\" (UID: \"308d4747-e427-4848-8961-a21d39dbd449\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-sdlvm" Dec 03 09:24:17 crc kubenswrapper[4576]: I1203 09:24:17.865677 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cs6rx\" (UniqueName: \"kubernetes.io/projected/308d4747-e427-4848-8961-a21d39dbd449-kube-api-access-cs6rx\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-sdlvm\" (UID: \"308d4747-e427-4848-8961-a21d39dbd449\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-sdlvm" Dec 03 09:24:17 crc kubenswrapper[4576]: I1203 09:24:17.959325 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-sdlvm" Dec 03 09:24:18 crc kubenswrapper[4576]: I1203 09:24:18.518825 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-sdlvm"] Dec 03 09:24:19 crc kubenswrapper[4576]: I1203 09:24:19.393980 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-sdlvm" event={"ID":"308d4747-e427-4848-8961-a21d39dbd449","Type":"ContainerStarted","Data":"2bc94e6c528615269ea253a45f7d7267b3790c09b1a2ae9d2446b255ee2e1d16"} Dec 03 09:24:19 crc kubenswrapper[4576]: I1203 09:24:19.394305 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-sdlvm" event={"ID":"308d4747-e427-4848-8961-a21d39dbd449","Type":"ContainerStarted","Data":"c57990f4ca42e11c34f5b99b51cb0c30b9a3fb968f8a05ac6a0b9a06af373916"} Dec 03 09:24:19 crc kubenswrapper[4576]: I1203 09:24:19.411347 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-sdlvm" podStartSLOduration=2.19556131 podStartE2EDuration="2.411314911s" podCreationTimestamp="2025-12-03 09:24:17 +0000 UTC" firstStartedPulling="2025-12-03 09:24:18.506642886 +0000 UTC m=+2665.892619890" lastFinishedPulling="2025-12-03 09:24:18.722396517 +0000 UTC m=+2666.108373491" observedRunningTime="2025-12-03 09:24:19.40758539 +0000 UTC m=+2666.793562384" watchObservedRunningTime="2025-12-03 09:24:19.411314911 +0000 UTC m=+2666.797291935" Dec 03 09:24:32 crc kubenswrapper[4576]: I1203 09:24:32.301812 4576 scope.go:117] "RemoveContainer" containerID="70b9250dc34f79d6e4d41960573875bf9e59c0987fa78766142d3cbe1a522085" Dec 03 09:24:32 crc kubenswrapper[4576]: I1203 09:24:32.354690 4576 scope.go:117] "RemoveContainer" 
containerID="b45b3dd0b3196523db64d7cab40faddcb4ff25c93c2abad8317748f50d6d3f53"
Dec 03 09:24:32 crc kubenswrapper[4576]: I1203 09:24:32.386101 4576 scope.go:117] "RemoveContainer" containerID="7c71269ee9bf011cb643c487f7a03babe292deef523a9a0c9342dfd761f803a8"
Dec 03 09:24:32 crc kubenswrapper[4576]: I1203 09:24:32.432782 4576 scope.go:117] "RemoveContainer" containerID="224e9b277c7f51f16558f795a6c8b95f9336adb8a6715337ed270828c032601a"
Dec 03 09:24:32 crc kubenswrapper[4576]: I1203 09:24:32.475087 4576 scope.go:117] "RemoveContainer" containerID="a52d278b4603a4a58597ce7e81dcac7013f927bfa9a6b2e1e418981934a4929a"
Dec 03 09:24:32 crc kubenswrapper[4576]: I1203 09:24:32.521897 4576 scope.go:117] "RemoveContainer" containerID="b3b0dd228776a90a37239c3bb7ca32bf2ce3e8d8f05f4691318c7669c7c8ff4b"
Dec 03 09:24:51 crc kubenswrapper[4576]: I1203 09:24:51.047303 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-8v6tp"]
Dec 03 09:24:51 crc kubenswrapper[4576]: I1203 09:24:51.063103 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-8v6tp"]
Dec 03 09:24:51 crc kubenswrapper[4576]: I1203 09:24:51.689599 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4fad4d6c-1741-4e92-b5c1-26e939b500df" path="/var/lib/kubelet/pods/4fad4d6c-1741-4e92-b5c1-26e939b500df/volumes"
Dec 03 09:25:15 crc kubenswrapper[4576]: I1203 09:25:15.034923 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-fm89l"]
Dec 03 09:25:15 crc kubenswrapper[4576]: I1203 09:25:15.045643 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-fm89l"]
Dec 03 09:25:15 crc kubenswrapper[4576]: I1203 09:25:15.692719 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24" path="/var/lib/kubelet/pods/66f27cff-ed0c-4b6f-b0f5-d42b42fbeb24/volumes"
Dec 03 09:25:16 crc kubenswrapper[4576]: I1203 09:25:16.030025 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-bnv8z"]
Dec 03 09:25:16 crc kubenswrapper[4576]: I1203 09:25:16.041361 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-bnv8z"]
Dec 03 09:25:17 crc kubenswrapper[4576]: I1203 09:25:17.695878 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c88ef9a8-712f-430d-859e-3acbf244c2b9" path="/var/lib/kubelet/pods/c88ef9a8-712f-430d-859e-3acbf244c2b9/volumes"
Dec 03 09:25:32 crc kubenswrapper[4576]: I1203 09:25:32.656256 4576 scope.go:117] "RemoveContainer" containerID="740357c445c00011968b252024d87485977eeccd7e173d9004c382b862d7c8ed"
Dec 03 09:25:32 crc kubenswrapper[4576]: I1203 09:25:32.694702 4576 scope.go:117] "RemoveContainer" containerID="038ef87ea9073e8d8641b0f5a757e272141bd0fda3ef2096348e3e29fd319af6"
Dec 03 09:25:32 crc kubenswrapper[4576]: I1203 09:25:32.762924 4576 scope.go:117] "RemoveContainer" containerID="9c208cd42b265841f6ca8b227674dc625009260199fdd77dbf2b73c5e73e86f7"
Dec 03 09:25:39 crc kubenswrapper[4576]: I1203 09:25:39.680638 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 09:25:39 crc kubenswrapper[4576]: I1203 09:25:39.681093 4576 prober.go:107] "Probe failed" 
probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 09:25:40 crc kubenswrapper[4576]: I1203 09:25:40.118656 4576 generic.go:334] "Generic (PLEG): container finished" podID="308d4747-e427-4848-8961-a21d39dbd449" containerID="2bc94e6c528615269ea253a45f7d7267b3790c09b1a2ae9d2446b255ee2e1d16" exitCode=0 Dec 03 09:25:40 crc kubenswrapper[4576]: I1203 09:25:40.118709 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-sdlvm" event={"ID":"308d4747-e427-4848-8961-a21d39dbd449","Type":"ContainerDied","Data":"2bc94e6c528615269ea253a45f7d7267b3790c09b1a2ae9d2446b255ee2e1d16"} Dec 03 09:25:41 crc kubenswrapper[4576]: I1203 09:25:41.561573 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-sdlvm" Dec 03 09:25:41 crc kubenswrapper[4576]: I1203 09:25:41.653751 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/308d4747-e427-4848-8961-a21d39dbd449-inventory\") pod \"308d4747-e427-4848-8961-a21d39dbd449\" (UID: \"308d4747-e427-4848-8961-a21d39dbd449\") " Dec 03 09:25:41 crc kubenswrapper[4576]: I1203 09:25:41.654317 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/308d4747-e427-4848-8961-a21d39dbd449-ssh-key\") pod \"308d4747-e427-4848-8961-a21d39dbd449\" (UID: \"308d4747-e427-4848-8961-a21d39dbd449\") " Dec 03 09:25:41 crc kubenswrapper[4576]: I1203 09:25:41.654501 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cs6rx\" (UniqueName: \"kubernetes.io/projected/308d4747-e427-4848-8961-a21d39dbd449-kube-api-access-cs6rx\") pod \"308d4747-e427-4848-8961-a21d39dbd449\" (UID: \"308d4747-e427-4848-8961-a21d39dbd449\") " Dec 03 09:25:41 crc kubenswrapper[4576]: I1203 09:25:41.659986 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308d4747-e427-4848-8961-a21d39dbd449-kube-api-access-cs6rx" (OuterVolumeSpecName: "kube-api-access-cs6rx") pod "308d4747-e427-4848-8961-a21d39dbd449" (UID: "308d4747-e427-4848-8961-a21d39dbd449"). InnerVolumeSpecName "kube-api-access-cs6rx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:25:41 crc kubenswrapper[4576]: I1203 09:25:41.698751 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308d4747-e427-4848-8961-a21d39dbd449-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "308d4747-e427-4848-8961-a21d39dbd449" (UID: "308d4747-e427-4848-8961-a21d39dbd449"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:25:41 crc kubenswrapper[4576]: I1203 09:25:41.703648 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308d4747-e427-4848-8961-a21d39dbd449-inventory" (OuterVolumeSpecName: "inventory") pod "308d4747-e427-4848-8961-a21d39dbd449" (UID: "308d4747-e427-4848-8961-a21d39dbd449"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:25:41 crc kubenswrapper[4576]: I1203 09:25:41.757019 4576 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/308d4747-e427-4848-8961-a21d39dbd449-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 09:25:41 crc kubenswrapper[4576]: I1203 09:25:41.757052 4576 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/308d4747-e427-4848-8961-a21d39dbd449-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 09:25:41 crc kubenswrapper[4576]: I1203 09:25:41.757065 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cs6rx\" (UniqueName: \"kubernetes.io/projected/308d4747-e427-4848-8961-a21d39dbd449-kube-api-access-cs6rx\") on node \"crc\" DevicePath \"\"" Dec 03 09:25:42 crc kubenswrapper[4576]: I1203 09:25:42.139262 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-sdlvm" event={"ID":"308d4747-e427-4848-8961-a21d39dbd449","Type":"ContainerDied","Data":"c57990f4ca42e11c34f5b99b51cb0c30b9a3fb968f8a05ac6a0b9a06af373916"} Dec 03 09:25:42 crc kubenswrapper[4576]: I1203 09:25:42.139302 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c57990f4ca42e11c34f5b99b51cb0c30b9a3fb968f8a05ac6a0b9a06af373916" Dec 03 09:25:42 crc kubenswrapper[4576]: I1203 09:25:42.139353 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-sdlvm" Dec 03 09:25:42 crc kubenswrapper[4576]: I1203 09:25:42.242384 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-lbmmg"] Dec 03 09:25:42 crc kubenswrapper[4576]: E1203 09:25:42.242805 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="308d4747-e427-4848-8961-a21d39dbd449" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 03 09:25:42 crc kubenswrapper[4576]: I1203 09:25:42.242823 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="308d4747-e427-4848-8961-a21d39dbd449" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 03 09:25:42 crc kubenswrapper[4576]: I1203 09:25:42.242998 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="308d4747-e427-4848-8961-a21d39dbd449" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 03 09:25:42 crc kubenswrapper[4576]: I1203 09:25:42.243696 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-lbmmg" Dec 03 09:25:42 crc kubenswrapper[4576]: I1203 09:25:42.249047 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 09:25:42 crc kubenswrapper[4576]: I1203 09:25:42.249927 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 09:25:42 crc kubenswrapper[4576]: I1203 09:25:42.250014 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 09:25:42 crc kubenswrapper[4576]: I1203 09:25:42.250149 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-sl8dp" Dec 03 09:25:42 crc kubenswrapper[4576]: I1203 09:25:42.261985 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-lbmmg"] Dec 03 09:25:42 crc kubenswrapper[4576]: I1203 09:25:42.393171 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/da77edd1-65a1-4f59-a4d3-e57679ae6acf-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-lbmmg\" (UID: \"da77edd1-65a1-4f59-a4d3-e57679ae6acf\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-lbmmg" Dec 03 09:25:42 crc kubenswrapper[4576]: I1203 09:25:42.393223 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/da77edd1-65a1-4f59-a4d3-e57679ae6acf-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-lbmmg\" (UID: \"da77edd1-65a1-4f59-a4d3-e57679ae6acf\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-lbmmg" Dec 03 09:25:42 crc kubenswrapper[4576]: I1203 09:25:42.393372 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cx2fh\" (UniqueName: \"kubernetes.io/projected/da77edd1-65a1-4f59-a4d3-e57679ae6acf-kube-api-access-cx2fh\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-lbmmg\" (UID: \"da77edd1-65a1-4f59-a4d3-e57679ae6acf\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-lbmmg" Dec 03 09:25:42 crc kubenswrapper[4576]: I1203 09:25:42.494519 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/da77edd1-65a1-4f59-a4d3-e57679ae6acf-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-lbmmg\" (UID: \"da77edd1-65a1-4f59-a4d3-e57679ae6acf\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-lbmmg" Dec 03 09:25:42 crc kubenswrapper[4576]: I1203 09:25:42.494595 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/da77edd1-65a1-4f59-a4d3-e57679ae6acf-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-lbmmg\" (UID: \"da77edd1-65a1-4f59-a4d3-e57679ae6acf\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-lbmmg" Dec 03 09:25:42 crc kubenswrapper[4576]: I1203 09:25:42.494766 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cx2fh\" (UniqueName: \"kubernetes.io/projected/da77edd1-65a1-4f59-a4d3-e57679ae6acf-kube-api-access-cx2fh\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-lbmmg\" (UID: \"da77edd1-65a1-4f59-a4d3-e57679ae6acf\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-lbmmg" Dec 03 09:25:42 crc kubenswrapper[4576]: I1203 09:25:42.502138 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/da77edd1-65a1-4f59-a4d3-e57679ae6acf-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-lbmmg\" (UID: \"da77edd1-65a1-4f59-a4d3-e57679ae6acf\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-lbmmg" Dec 03 09:25:42 crc kubenswrapper[4576]: I1203 09:25:42.502938 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/da77edd1-65a1-4f59-a4d3-e57679ae6acf-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-lbmmg\" (UID: \"da77edd1-65a1-4f59-a4d3-e57679ae6acf\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-lbmmg" Dec 03 09:25:42 crc kubenswrapper[4576]: I1203 09:25:42.512984 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cx2fh\" (UniqueName: \"kubernetes.io/projected/da77edd1-65a1-4f59-a4d3-e57679ae6acf-kube-api-access-cx2fh\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-lbmmg\" (UID: \"da77edd1-65a1-4f59-a4d3-e57679ae6acf\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-lbmmg" Dec 03 09:25:42 crc kubenswrapper[4576]: I1203 09:25:42.561079 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-lbmmg" Dec 03 09:25:43 crc kubenswrapper[4576]: I1203 09:25:43.111242 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-lbmmg"] Dec 03 09:25:43 crc kubenswrapper[4576]: I1203 09:25:43.149008 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-lbmmg" event={"ID":"da77edd1-65a1-4f59-a4d3-e57679ae6acf","Type":"ContainerStarted","Data":"87e380d559d4fde1dc160b4fcd9b995e028cb619ae8cb64ac5259fb1ac8e848c"} Dec 03 09:25:44 crc kubenswrapper[4576]: I1203 09:25:44.183353 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-lbmmg" event={"ID":"da77edd1-65a1-4f59-a4d3-e57679ae6acf","Type":"ContainerStarted","Data":"32ef55f316e57d3d947f1f607d55bd3bc9af3b362d9e277fa103ec4970fd6186"} Dec 03 09:25:44 crc kubenswrapper[4576]: I1203 09:25:44.216834 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-lbmmg" podStartSLOduration=2.02500819 podStartE2EDuration="2.21681033s" podCreationTimestamp="2025-12-03 09:25:42 +0000 UTC" firstStartedPulling="2025-12-03 09:25:43.114905676 +0000 UTC m=+2750.500882660" lastFinishedPulling="2025-12-03 09:25:43.306707816 +0000 UTC m=+2750.692684800" observedRunningTime="2025-12-03 09:25:44.203750578 +0000 UTC m=+2751.589727562" watchObservedRunningTime="2025-12-03 09:25:44.21681033 +0000 UTC m=+2751.602787314" Dec 03 09:25:49 crc kubenswrapper[4576]: I1203 09:25:49.223217 4576 generic.go:334] "Generic (PLEG): container finished" podID="da77edd1-65a1-4f59-a4d3-e57679ae6acf" containerID="32ef55f316e57d3d947f1f607d55bd3bc9af3b362d9e277fa103ec4970fd6186" exitCode=0 Dec 03 09:25:49 crc kubenswrapper[4576]: I1203 
09:25:49.223316 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-lbmmg" event={"ID":"da77edd1-65a1-4f59-a4d3-e57679ae6acf","Type":"ContainerDied","Data":"32ef55f316e57d3d947f1f607d55bd3bc9af3b362d9e277fa103ec4970fd6186"} Dec 03 09:25:50 crc kubenswrapper[4576]: I1203 09:25:50.715682 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-lbmmg" Dec 03 09:25:50 crc kubenswrapper[4576]: I1203 09:25:50.777662 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/da77edd1-65a1-4f59-a4d3-e57679ae6acf-inventory\") pod \"da77edd1-65a1-4f59-a4d3-e57679ae6acf\" (UID: \"da77edd1-65a1-4f59-a4d3-e57679ae6acf\") " Dec 03 09:25:50 crc kubenswrapper[4576]: I1203 09:25:50.777802 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cx2fh\" (UniqueName: \"kubernetes.io/projected/da77edd1-65a1-4f59-a4d3-e57679ae6acf-kube-api-access-cx2fh\") pod \"da77edd1-65a1-4f59-a4d3-e57679ae6acf\" (UID: \"da77edd1-65a1-4f59-a4d3-e57679ae6acf\") " Dec 03 09:25:50 crc kubenswrapper[4576]: I1203 09:25:50.777869 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/da77edd1-65a1-4f59-a4d3-e57679ae6acf-ssh-key\") pod \"da77edd1-65a1-4f59-a4d3-e57679ae6acf\" (UID: \"da77edd1-65a1-4f59-a4d3-e57679ae6acf\") " Dec 03 09:25:50 crc kubenswrapper[4576]: I1203 09:25:50.818803 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da77edd1-65a1-4f59-a4d3-e57679ae6acf-kube-api-access-cx2fh" (OuterVolumeSpecName: "kube-api-access-cx2fh") pod "da77edd1-65a1-4f59-a4d3-e57679ae6acf" (UID: "da77edd1-65a1-4f59-a4d3-e57679ae6acf"). InnerVolumeSpecName "kube-api-access-cx2fh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:25:50 crc kubenswrapper[4576]: I1203 09:25:50.844569 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/da77edd1-65a1-4f59-a4d3-e57679ae6acf-inventory" (OuterVolumeSpecName: "inventory") pod "da77edd1-65a1-4f59-a4d3-e57679ae6acf" (UID: "da77edd1-65a1-4f59-a4d3-e57679ae6acf"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:25:50 crc kubenswrapper[4576]: I1203 09:25:50.863576 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/da77edd1-65a1-4f59-a4d3-e57679ae6acf-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "da77edd1-65a1-4f59-a4d3-e57679ae6acf" (UID: "da77edd1-65a1-4f59-a4d3-e57679ae6acf"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:25:50 crc kubenswrapper[4576]: I1203 09:25:50.879729 4576 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/da77edd1-65a1-4f59-a4d3-e57679ae6acf-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 09:25:50 crc kubenswrapper[4576]: I1203 09:25:50.880011 4576 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/da77edd1-65a1-4f59-a4d3-e57679ae6acf-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 09:25:50 crc kubenswrapper[4576]: I1203 09:25:50.880023 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cx2fh\" (UniqueName: \"kubernetes.io/projected/da77edd1-65a1-4f59-a4d3-e57679ae6acf-kube-api-access-cx2fh\") on node \"crc\" DevicePath \"\"" Dec 03 09:25:51 crc kubenswrapper[4576]: I1203 09:25:51.247754 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-lbmmg" event={"ID":"da77edd1-65a1-4f59-a4d3-e57679ae6acf","Type":"ContainerDied","Data":"87e380d559d4fde1dc160b4fcd9b995e028cb619ae8cb64ac5259fb1ac8e848c"} Dec 03 09:25:51 crc kubenswrapper[4576]: I1203 09:25:51.247847 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="87e380d559d4fde1dc160b4fcd9b995e028cb619ae8cb64ac5259fb1ac8e848c" Dec 03 09:25:51 crc kubenswrapper[4576]: I1203 09:25:51.247792 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-lbmmg" Dec 03 09:25:51 crc kubenswrapper[4576]: I1203 09:25:51.347755 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-2vcbc"] Dec 03 09:25:51 crc kubenswrapper[4576]: E1203 09:25:51.348236 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da77edd1-65a1-4f59-a4d3-e57679ae6acf" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 03 09:25:51 crc kubenswrapper[4576]: I1203 09:25:51.348263 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="da77edd1-65a1-4f59-a4d3-e57679ae6acf" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 03 09:25:51 crc kubenswrapper[4576]: I1203 09:25:51.348507 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="da77edd1-65a1-4f59-a4d3-e57679ae6acf" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 03 09:25:51 crc kubenswrapper[4576]: I1203 09:25:51.349282 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-2vcbc" Dec 03 09:25:51 crc kubenswrapper[4576]: I1203 09:25:51.353200 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-sl8dp" Dec 03 09:25:51 crc kubenswrapper[4576]: I1203 09:25:51.353908 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 09:25:51 crc kubenswrapper[4576]: I1203 09:25:51.362430 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 09:25:51 crc kubenswrapper[4576]: I1203 09:25:51.362963 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 09:25:51 crc kubenswrapper[4576]: I1203 09:25:51.382394 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-2vcbc"] Dec 03 09:25:51 crc kubenswrapper[4576]: I1203 09:25:51.509193 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/71120894-497f-438e-a42f-f3e6fd50d2de-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-2vcbc\" (UID: \"71120894-497f-438e-a42f-f3e6fd50d2de\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-2vcbc" Dec 03 09:25:51 crc kubenswrapper[4576]: I1203 09:25:51.509673 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/71120894-497f-438e-a42f-f3e6fd50d2de-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-2vcbc\" (UID: \"71120894-497f-438e-a42f-f3e6fd50d2de\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-2vcbc" Dec 03 09:25:51 crc kubenswrapper[4576]: I1203 09:25:51.509749 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qvkkv\" (UniqueName: \"kubernetes.io/projected/71120894-497f-438e-a42f-f3e6fd50d2de-kube-api-access-qvkkv\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-2vcbc\" (UID: \"71120894-497f-438e-a42f-f3e6fd50d2de\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-2vcbc" Dec 03 09:25:51 crc kubenswrapper[4576]: I1203 09:25:51.611492 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/71120894-497f-438e-a42f-f3e6fd50d2de-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-2vcbc\" (UID: \"71120894-497f-438e-a42f-f3e6fd50d2de\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-2vcbc" Dec 03 09:25:51 crc kubenswrapper[4576]: I1203 09:25:51.611710 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qvkkv\" (UniqueName: \"kubernetes.io/projected/71120894-497f-438e-a42f-f3e6fd50d2de-kube-api-access-qvkkv\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-2vcbc\" (UID: \"71120894-497f-438e-a42f-f3e6fd50d2de\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-2vcbc" Dec 03 09:25:51 crc kubenswrapper[4576]: I1203 09:25:51.611778 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/71120894-497f-438e-a42f-f3e6fd50d2de-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-2vcbc\" (UID: 
\"71120894-497f-438e-a42f-f3e6fd50d2de\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-2vcbc" Dec 03 09:25:51 crc kubenswrapper[4576]: I1203 09:25:51.618461 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/71120894-497f-438e-a42f-f3e6fd50d2de-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-2vcbc\" (UID: \"71120894-497f-438e-a42f-f3e6fd50d2de\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-2vcbc" Dec 03 09:25:51 crc kubenswrapper[4576]: I1203 09:25:51.619029 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/71120894-497f-438e-a42f-f3e6fd50d2de-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-2vcbc\" (UID: \"71120894-497f-438e-a42f-f3e6fd50d2de\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-2vcbc" Dec 03 09:25:51 crc kubenswrapper[4576]: I1203 09:25:51.629228 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qvkkv\" (UniqueName: \"kubernetes.io/projected/71120894-497f-438e-a42f-f3e6fd50d2de-kube-api-access-qvkkv\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-2vcbc\" (UID: \"71120894-497f-438e-a42f-f3e6fd50d2de\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-2vcbc" Dec 03 09:25:51 crc kubenswrapper[4576]: I1203 09:25:51.670986 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-2vcbc" Dec 03 09:25:52 crc kubenswrapper[4576]: I1203 09:25:52.202056 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-2vcbc"] Dec 03 09:25:52 crc kubenswrapper[4576]: I1203 09:25:52.259221 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-2vcbc" event={"ID":"71120894-497f-438e-a42f-f3e6fd50d2de","Type":"ContainerStarted","Data":"5d22e755aa977a6c43c9f52cf597cec8c9f7bbf8d1122d1d4b4429927708fe24"} Dec 03 09:25:53 crc kubenswrapper[4576]: I1203 09:25:53.270063 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-2vcbc" event={"ID":"71120894-497f-438e-a42f-f3e6fd50d2de","Type":"ContainerStarted","Data":"224165737f0b0d34a1d7b7ef0c4b86e7a48fda487e7f363170d471bebb2b3ca4"} Dec 03 09:25:53 crc kubenswrapper[4576]: I1203 09:25:53.288002 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-2vcbc" podStartSLOduration=2.109080901 podStartE2EDuration="2.287979044s" podCreationTimestamp="2025-12-03 09:25:51 +0000 UTC" firstStartedPulling="2025-12-03 09:25:52.210030964 +0000 UTC m=+2759.596007948" lastFinishedPulling="2025-12-03 09:25:52.388929107 +0000 UTC m=+2759.774906091" observedRunningTime="2025-12-03 09:25:53.28595711 +0000 UTC m=+2760.671934094" watchObservedRunningTime="2025-12-03 09:25:53.287979044 +0000 UTC m=+2760.673956028" Dec 03 09:25:58 crc kubenswrapper[4576]: I1203 09:25:58.049891 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-wgjr9"] Dec 03 09:25:58 crc kubenswrapper[4576]: I1203 09:25:58.061278 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-wgjr9"] Dec 03 09:25:59 crc kubenswrapper[4576]: I1203 09:25:59.688609 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="2cdcf30e-382e-4530-95ea-1590dbe7d7d1" path="/var/lib/kubelet/pods/2cdcf30e-382e-4530-95ea-1590dbe7d7d1/volumes" Dec 03 09:26:09 crc kubenswrapper[4576]: I1203 09:26:09.680511 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 09:26:09 crc kubenswrapper[4576]: I1203 09:26:09.681271 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 09:26:27 crc kubenswrapper[4576]: I1203 09:26:27.106257 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-gr72t"] Dec 03 09:26:27 crc kubenswrapper[4576]: I1203 09:26:27.108864 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gr72t" Dec 03 09:26:27 crc kubenswrapper[4576]: I1203 09:26:27.127639 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gr72t"] Dec 03 09:26:27 crc kubenswrapper[4576]: I1203 09:26:27.275950 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8be1c885-f5b1-428a-892e-bf0d68ddf12e-catalog-content\") pod \"redhat-marketplace-gr72t\" (UID: \"8be1c885-f5b1-428a-892e-bf0d68ddf12e\") " pod="openshift-marketplace/redhat-marketplace-gr72t" Dec 03 09:26:27 crc kubenswrapper[4576]: I1203 09:26:27.276041 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gwtpd\" (UniqueName: \"kubernetes.io/projected/8be1c885-f5b1-428a-892e-bf0d68ddf12e-kube-api-access-gwtpd\") pod \"redhat-marketplace-gr72t\" (UID: \"8be1c885-f5b1-428a-892e-bf0d68ddf12e\") " pod="openshift-marketplace/redhat-marketplace-gr72t" Dec 03 09:26:27 crc kubenswrapper[4576]: I1203 09:26:27.276065 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8be1c885-f5b1-428a-892e-bf0d68ddf12e-utilities\") pod \"redhat-marketplace-gr72t\" (UID: \"8be1c885-f5b1-428a-892e-bf0d68ddf12e\") " pod="openshift-marketplace/redhat-marketplace-gr72t" Dec 03 09:26:27 crc kubenswrapper[4576]: I1203 09:26:27.377451 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8be1c885-f5b1-428a-892e-bf0d68ddf12e-catalog-content\") pod \"redhat-marketplace-gr72t\" (UID: \"8be1c885-f5b1-428a-892e-bf0d68ddf12e\") " pod="openshift-marketplace/redhat-marketplace-gr72t" Dec 03 09:26:27 crc kubenswrapper[4576]: I1203 09:26:27.377542 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gwtpd\" (UniqueName: \"kubernetes.io/projected/8be1c885-f5b1-428a-892e-bf0d68ddf12e-kube-api-access-gwtpd\") pod \"redhat-marketplace-gr72t\" (UID: \"8be1c885-f5b1-428a-892e-bf0d68ddf12e\") " pod="openshift-marketplace/redhat-marketplace-gr72t" Dec 03 09:26:27 crc kubenswrapper[4576]: I1203 09:26:27.377564 4576 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8be1c885-f5b1-428a-892e-bf0d68ddf12e-utilities\") pod \"redhat-marketplace-gr72t\" (UID: \"8be1c885-f5b1-428a-892e-bf0d68ddf12e\") " pod="openshift-marketplace/redhat-marketplace-gr72t" Dec 03 09:26:27 crc kubenswrapper[4576]: I1203 09:26:27.377940 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8be1c885-f5b1-428a-892e-bf0d68ddf12e-catalog-content\") pod \"redhat-marketplace-gr72t\" (UID: \"8be1c885-f5b1-428a-892e-bf0d68ddf12e\") " pod="openshift-marketplace/redhat-marketplace-gr72t" Dec 03 09:26:27 crc kubenswrapper[4576]: I1203 09:26:27.377963 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8be1c885-f5b1-428a-892e-bf0d68ddf12e-utilities\") pod \"redhat-marketplace-gr72t\" (UID: \"8be1c885-f5b1-428a-892e-bf0d68ddf12e\") " pod="openshift-marketplace/redhat-marketplace-gr72t" Dec 03 09:26:27 crc kubenswrapper[4576]: I1203 09:26:27.404444 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gwtpd\" (UniqueName: \"kubernetes.io/projected/8be1c885-f5b1-428a-892e-bf0d68ddf12e-kube-api-access-gwtpd\") pod \"redhat-marketplace-gr72t\" (UID: \"8be1c885-f5b1-428a-892e-bf0d68ddf12e\") " pod="openshift-marketplace/redhat-marketplace-gr72t" Dec 03 09:26:27 crc kubenswrapper[4576]: I1203 09:26:27.495337 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gr72t" Dec 03 09:26:28 crc kubenswrapper[4576]: I1203 09:26:28.002609 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gr72t"] Dec 03 09:26:28 crc kubenswrapper[4576]: I1203 09:26:28.590338 4576 generic.go:334] "Generic (PLEG): container finished" podID="8be1c885-f5b1-428a-892e-bf0d68ddf12e" containerID="6c35bc955c9a6492bc1c33a94ee8da4db2ae9332f09a74023b27fa3a69f21a7d" exitCode=0 Dec 03 09:26:28 crc kubenswrapper[4576]: I1203 09:26:28.590464 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gr72t" event={"ID":"8be1c885-f5b1-428a-892e-bf0d68ddf12e","Type":"ContainerDied","Data":"6c35bc955c9a6492bc1c33a94ee8da4db2ae9332f09a74023b27fa3a69f21a7d"} Dec 03 09:26:28 crc kubenswrapper[4576]: I1203 09:26:28.590965 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gr72t" event={"ID":"8be1c885-f5b1-428a-892e-bf0d68ddf12e","Type":"ContainerStarted","Data":"ed8568aa757733c3f7c2c4fa89cc49ccc8cba83ac630290f69fe7804fc90f197"} Dec 03 09:26:28 crc kubenswrapper[4576]: I1203 09:26:28.594618 4576 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 09:26:29 crc kubenswrapper[4576]: I1203 09:26:29.603622 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gr72t" event={"ID":"8be1c885-f5b1-428a-892e-bf0d68ddf12e","Type":"ContainerStarted","Data":"b0e5c9392fb59df6e2bebc5183c9c70b6f9d007c52e5c8126321575bcbf77d21"} Dec 03 09:26:30 crc kubenswrapper[4576]: I1203 09:26:30.616511 4576 generic.go:334] "Generic (PLEG): container finished" podID="8be1c885-f5b1-428a-892e-bf0d68ddf12e" containerID="b0e5c9392fb59df6e2bebc5183c9c70b6f9d007c52e5c8126321575bcbf77d21" exitCode=0 Dec 03 09:26:30 crc kubenswrapper[4576]: I1203 09:26:30.616878 4576 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gr72t" event={"ID":"8be1c885-f5b1-428a-892e-bf0d68ddf12e","Type":"ContainerDied","Data":"b0e5c9392fb59df6e2bebc5183c9c70b6f9d007c52e5c8126321575bcbf77d21"} Dec 03 09:26:32 crc kubenswrapper[4576]: I1203 09:26:32.637980 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gr72t" event={"ID":"8be1c885-f5b1-428a-892e-bf0d68ddf12e","Type":"ContainerStarted","Data":"f84561742f822115e5d672cee30c50a14b28c8ae034e1857b74c67ef47db2920"} Dec 03 09:26:32 crc kubenswrapper[4576]: I1203 09:26:32.661314 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-gr72t" podStartSLOduration=2.789566754 podStartE2EDuration="5.661295483s" podCreationTimestamp="2025-12-03 09:26:27 +0000 UTC" firstStartedPulling="2025-12-03 09:26:28.59438915 +0000 UTC m=+2795.980366134" lastFinishedPulling="2025-12-03 09:26:31.466117869 +0000 UTC m=+2798.852094863" observedRunningTime="2025-12-03 09:26:32.654789318 +0000 UTC m=+2800.040766302" watchObservedRunningTime="2025-12-03 09:26:32.661295483 +0000 UTC m=+2800.047272467" Dec 03 09:26:32 crc kubenswrapper[4576]: I1203 09:26:32.909818 4576 scope.go:117] "RemoveContainer" containerID="ce271073fdd305620d609d731cdfc7425998214290f4e3d5d73fa66bccd0efd7" Dec 03 09:26:35 crc kubenswrapper[4576]: I1203 09:26:35.667629 4576 generic.go:334] "Generic (PLEG): container finished" podID="71120894-497f-438e-a42f-f3e6fd50d2de" containerID="224165737f0b0d34a1d7b7ef0c4b86e7a48fda487e7f363170d471bebb2b3ca4" exitCode=0 Dec 03 09:26:35 crc kubenswrapper[4576]: I1203 09:26:35.667768 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-2vcbc" event={"ID":"71120894-497f-438e-a42f-f3e6fd50d2de","Type":"ContainerDied","Data":"224165737f0b0d34a1d7b7ef0c4b86e7a48fda487e7f363170d471bebb2b3ca4"} Dec 03 09:26:37 crc kubenswrapper[4576]: I1203 09:26:37.145001 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-2vcbc" Dec 03 09:26:37 crc kubenswrapper[4576]: I1203 09:26:37.282747 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/71120894-497f-438e-a42f-f3e6fd50d2de-ssh-key\") pod \"71120894-497f-438e-a42f-f3e6fd50d2de\" (UID: \"71120894-497f-438e-a42f-f3e6fd50d2de\") " Dec 03 09:26:37 crc kubenswrapper[4576]: I1203 09:26:37.282871 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qvkkv\" (UniqueName: \"kubernetes.io/projected/71120894-497f-438e-a42f-f3e6fd50d2de-kube-api-access-qvkkv\") pod \"71120894-497f-438e-a42f-f3e6fd50d2de\" (UID: \"71120894-497f-438e-a42f-f3e6fd50d2de\") " Dec 03 09:26:37 crc kubenswrapper[4576]: I1203 09:26:37.282919 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/71120894-497f-438e-a42f-f3e6fd50d2de-inventory\") pod \"71120894-497f-438e-a42f-f3e6fd50d2de\" (UID: \"71120894-497f-438e-a42f-f3e6fd50d2de\") " Dec 03 09:26:37 crc kubenswrapper[4576]: I1203 09:26:37.315017 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71120894-497f-438e-a42f-f3e6fd50d2de-kube-api-access-qvkkv" (OuterVolumeSpecName: "kube-api-access-qvkkv") pod "71120894-497f-438e-a42f-f3e6fd50d2de" (UID: "71120894-497f-438e-a42f-f3e6fd50d2de"). InnerVolumeSpecName "kube-api-access-qvkkv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:26:37 crc kubenswrapper[4576]: I1203 09:26:37.338723 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71120894-497f-438e-a42f-f3e6fd50d2de-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "71120894-497f-438e-a42f-f3e6fd50d2de" (UID: "71120894-497f-438e-a42f-f3e6fd50d2de"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:26:37 crc kubenswrapper[4576]: I1203 09:26:37.368677 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71120894-497f-438e-a42f-f3e6fd50d2de-inventory" (OuterVolumeSpecName: "inventory") pod "71120894-497f-438e-a42f-f3e6fd50d2de" (UID: "71120894-497f-438e-a42f-f3e6fd50d2de"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:26:37 crc kubenswrapper[4576]: I1203 09:26:37.386006 4576 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/71120894-497f-438e-a42f-f3e6fd50d2de-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 09:26:37 crc kubenswrapper[4576]: I1203 09:26:37.386326 4576 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/71120894-497f-438e-a42f-f3e6fd50d2de-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 09:26:37 crc kubenswrapper[4576]: I1203 09:26:37.386420 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qvkkv\" (UniqueName: \"kubernetes.io/projected/71120894-497f-438e-a42f-f3e6fd50d2de-kube-api-access-qvkkv\") on node \"crc\" DevicePath \"\"" Dec 03 09:26:37 crc kubenswrapper[4576]: I1203 09:26:37.496144 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-gr72t" Dec 03 09:26:37 crc kubenswrapper[4576]: I1203 09:26:37.496590 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-gr72t" Dec 03 09:26:37 crc kubenswrapper[4576]: I1203 09:26:37.554428 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-gr72t" Dec 03 09:26:37 crc kubenswrapper[4576]: I1203 09:26:37.699479 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-2vcbc" event={"ID":"71120894-497f-438e-a42f-f3e6fd50d2de","Type":"ContainerDied","Data":"5d22e755aa977a6c43c9f52cf597cec8c9f7bbf8d1122d1d4b4429927708fe24"} Dec 03 09:26:37 crc kubenswrapper[4576]: I1203 09:26:37.699612 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5d22e755aa977a6c43c9f52cf597cec8c9f7bbf8d1122d1d4b4429927708fe24" Dec 03 09:26:37 crc kubenswrapper[4576]: I1203 09:26:37.700190 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-2vcbc" Dec 03 09:26:37 crc kubenswrapper[4576]: I1203 09:26:37.791515 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-gr72t" Dec 03 09:26:37 crc kubenswrapper[4576]: I1203 09:26:37.855761 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gr72t"] Dec 03 09:26:37 crc kubenswrapper[4576]: I1203 09:26:37.883594 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-q8xlq"] Dec 03 09:26:37 crc kubenswrapper[4576]: E1203 09:26:37.884113 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71120894-497f-438e-a42f-f3e6fd50d2de" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 03 09:26:37 crc kubenswrapper[4576]: I1203 09:26:37.884130 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="71120894-497f-438e-a42f-f3e6fd50d2de" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 03 09:26:37 crc kubenswrapper[4576]: I1203 09:26:37.884421 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="71120894-497f-438e-a42f-f3e6fd50d2de" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 03 09:26:37 crc kubenswrapper[4576]: I1203 09:26:37.885248 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-q8xlq" Dec 03 09:26:37 crc kubenswrapper[4576]: I1203 09:26:37.894463 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-q8xlq"] Dec 03 09:26:37 crc kubenswrapper[4576]: I1203 09:26:37.918275 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 09:26:37 crc kubenswrapper[4576]: I1203 09:26:37.918751 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 09:26:37 crc kubenswrapper[4576]: I1203 09:26:37.918923 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-sl8dp" Dec 03 09:26:37 crc kubenswrapper[4576]: I1203 09:26:37.919045 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 09:26:38 crc kubenswrapper[4576]: I1203 09:26:38.020875 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7b74b\" (UniqueName: \"kubernetes.io/projected/be59e37f-89da-4b5c-9126-9fd6fe4d9ec8-kube-api-access-7b74b\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-q8xlq\" (UID: \"be59e37f-89da-4b5c-9126-9fd6fe4d9ec8\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-q8xlq" Dec 03 09:26:38 crc kubenswrapper[4576]: I1203 09:26:38.021169 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/be59e37f-89da-4b5c-9126-9fd6fe4d9ec8-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-q8xlq\" (UID: \"be59e37f-89da-4b5c-9126-9fd6fe4d9ec8\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-q8xlq" Dec 03 09:26:38 crc kubenswrapper[4576]: I1203 09:26:38.021368 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/be59e37f-89da-4b5c-9126-9fd6fe4d9ec8-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-q8xlq\" (UID: \"be59e37f-89da-4b5c-9126-9fd6fe4d9ec8\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-q8xlq" Dec 03 09:26:38 crc kubenswrapper[4576]: I1203 09:26:38.122640 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/be59e37f-89da-4b5c-9126-9fd6fe4d9ec8-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-q8xlq\" (UID: \"be59e37f-89da-4b5c-9126-9fd6fe4d9ec8\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-q8xlq" Dec 03 09:26:38 crc kubenswrapper[4576]: I1203 09:26:38.122698 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7b74b\" (UniqueName: \"kubernetes.io/projected/be59e37f-89da-4b5c-9126-9fd6fe4d9ec8-kube-api-access-7b74b\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-q8xlq\" (UID: \"be59e37f-89da-4b5c-9126-9fd6fe4d9ec8\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-q8xlq" Dec 03 09:26:38 crc kubenswrapper[4576]: I1203 09:26:38.122769 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/be59e37f-89da-4b5c-9126-9fd6fe4d9ec8-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-q8xlq\" 
(UID: \"be59e37f-89da-4b5c-9126-9fd6fe4d9ec8\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-q8xlq" Dec 03 09:26:38 crc kubenswrapper[4576]: I1203 09:26:38.130750 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/be59e37f-89da-4b5c-9126-9fd6fe4d9ec8-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-q8xlq\" (UID: \"be59e37f-89da-4b5c-9126-9fd6fe4d9ec8\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-q8xlq" Dec 03 09:26:38 crc kubenswrapper[4576]: I1203 09:26:38.132034 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/be59e37f-89da-4b5c-9126-9fd6fe4d9ec8-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-q8xlq\" (UID: \"be59e37f-89da-4b5c-9126-9fd6fe4d9ec8\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-q8xlq" Dec 03 09:26:38 crc kubenswrapper[4576]: I1203 09:26:38.143271 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7b74b\" (UniqueName: \"kubernetes.io/projected/be59e37f-89da-4b5c-9126-9fd6fe4d9ec8-kube-api-access-7b74b\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-q8xlq\" (UID: \"be59e37f-89da-4b5c-9126-9fd6fe4d9ec8\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-q8xlq" Dec 03 09:26:38 crc kubenswrapper[4576]: I1203 09:26:38.239251 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-q8xlq" Dec 03 09:26:38 crc kubenswrapper[4576]: I1203 09:26:38.785657 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-q8xlq"] Dec 03 09:26:39 crc kubenswrapper[4576]: I1203 09:26:39.680951 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 09:26:39 crc kubenswrapper[4576]: I1203 09:26:39.681347 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 09:26:39 crc kubenswrapper[4576]: I1203 09:26:39.693025 4576 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" Dec 03 09:26:39 crc kubenswrapper[4576]: I1203 09:26:39.694218 4576 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"beab5b73426a8a2ed784ee9435f3e0edb3d47b3882d27e186bb2abf808fa00f3"} pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 09:26:39 crc kubenswrapper[4576]: I1203 09:26:39.694372 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" 
containerID="cri-o://beab5b73426a8a2ed784ee9435f3e0edb3d47b3882d27e186bb2abf808fa00f3" gracePeriod=600 Dec 03 09:26:39 crc kubenswrapper[4576]: I1203 09:26:39.717497 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-q8xlq" event={"ID":"be59e37f-89da-4b5c-9126-9fd6fe4d9ec8","Type":"ContainerStarted","Data":"1a8eafdca7124a5efda5cdef29683086840edbe8314cd2ab4e44b96c22debc71"} Dec 03 09:26:39 crc kubenswrapper[4576]: I1203 09:26:39.717880 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-q8xlq" event={"ID":"be59e37f-89da-4b5c-9126-9fd6fe4d9ec8","Type":"ContainerStarted","Data":"9a4abf7e4a7681145e03847788677a403b858e37dd11d409f0f5781a53ffc5a0"} Dec 03 09:26:39 crc kubenswrapper[4576]: I1203 09:26:39.717710 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-gr72t" podUID="8be1c885-f5b1-428a-892e-bf0d68ddf12e" containerName="registry-server" containerID="cri-o://f84561742f822115e5d672cee30c50a14b28c8ae034e1857b74c67ef47db2920" gracePeriod=2 Dec 03 09:26:39 crc kubenswrapper[4576]: I1203 09:26:39.756420 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-q8xlq" podStartSLOduration=2.617514329 podStartE2EDuration="2.756396225s" podCreationTimestamp="2025-12-03 09:26:37 +0000 UTC" firstStartedPulling="2025-12-03 09:26:38.797039625 +0000 UTC m=+2806.183016609" lastFinishedPulling="2025-12-03 09:26:38.935921521 +0000 UTC m=+2806.321898505" observedRunningTime="2025-12-03 09:26:39.74765939 +0000 UTC m=+2807.133636374" watchObservedRunningTime="2025-12-03 09:26:39.756396225 +0000 UTC m=+2807.142373209" Dec 03 09:26:39 crc kubenswrapper[4576]: E1203 09:26:39.875073 4576 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod60b1bede_26e9_4b5d_b450_9866da685693.slice/crio-beab5b73426a8a2ed784ee9435f3e0edb3d47b3882d27e186bb2abf808fa00f3.scope\": RecentStats: unable to find data in memory cache]" Dec 03 09:26:40 crc kubenswrapper[4576]: I1203 09:26:40.224794 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gr72t" Dec 03 09:26:40 crc kubenswrapper[4576]: I1203 09:26:40.289486 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gwtpd\" (UniqueName: \"kubernetes.io/projected/8be1c885-f5b1-428a-892e-bf0d68ddf12e-kube-api-access-gwtpd\") pod \"8be1c885-f5b1-428a-892e-bf0d68ddf12e\" (UID: \"8be1c885-f5b1-428a-892e-bf0d68ddf12e\") " Dec 03 09:26:40 crc kubenswrapper[4576]: I1203 09:26:40.289838 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8be1c885-f5b1-428a-892e-bf0d68ddf12e-catalog-content\") pod \"8be1c885-f5b1-428a-892e-bf0d68ddf12e\" (UID: \"8be1c885-f5b1-428a-892e-bf0d68ddf12e\") " Dec 03 09:26:40 crc kubenswrapper[4576]: I1203 09:26:40.289961 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8be1c885-f5b1-428a-892e-bf0d68ddf12e-utilities\") pod \"8be1c885-f5b1-428a-892e-bf0d68ddf12e\" (UID: \"8be1c885-f5b1-428a-892e-bf0d68ddf12e\") " Dec 03 09:26:40 crc kubenswrapper[4576]: I1203 09:26:40.292648 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8be1c885-f5b1-428a-892e-bf0d68ddf12e-utilities" (OuterVolumeSpecName: "utilities") pod "8be1c885-f5b1-428a-892e-bf0d68ddf12e" (UID: "8be1c885-f5b1-428a-892e-bf0d68ddf12e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:26:40 crc kubenswrapper[4576]: I1203 09:26:40.308734 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8be1c885-f5b1-428a-892e-bf0d68ddf12e-kube-api-access-gwtpd" (OuterVolumeSpecName: "kube-api-access-gwtpd") pod "8be1c885-f5b1-428a-892e-bf0d68ddf12e" (UID: "8be1c885-f5b1-428a-892e-bf0d68ddf12e"). InnerVolumeSpecName "kube-api-access-gwtpd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:26:40 crc kubenswrapper[4576]: I1203 09:26:40.318063 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8be1c885-f5b1-428a-892e-bf0d68ddf12e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8be1c885-f5b1-428a-892e-bf0d68ddf12e" (UID: "8be1c885-f5b1-428a-892e-bf0d68ddf12e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:26:40 crc kubenswrapper[4576]: I1203 09:26:40.392836 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8be1c885-f5b1-428a-892e-bf0d68ddf12e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 09:26:40 crc kubenswrapper[4576]: I1203 09:26:40.393123 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8be1c885-f5b1-428a-892e-bf0d68ddf12e-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 09:26:40 crc kubenswrapper[4576]: I1203 09:26:40.393160 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gwtpd\" (UniqueName: \"kubernetes.io/projected/8be1c885-f5b1-428a-892e-bf0d68ddf12e-kube-api-access-gwtpd\") on node \"crc\" DevicePath \"\"" Dec 03 09:26:40 crc kubenswrapper[4576]: I1203 09:26:40.726981 4576 generic.go:334] "Generic (PLEG): container finished" podID="8be1c885-f5b1-428a-892e-bf0d68ddf12e" containerID="f84561742f822115e5d672cee30c50a14b28c8ae034e1857b74c67ef47db2920" exitCode=0 Dec 03 09:26:40 crc kubenswrapper[4576]: I1203 09:26:40.727080 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gr72t" Dec 03 09:26:40 crc kubenswrapper[4576]: I1203 09:26:40.728210 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gr72t" event={"ID":"8be1c885-f5b1-428a-892e-bf0d68ddf12e","Type":"ContainerDied","Data":"f84561742f822115e5d672cee30c50a14b28c8ae034e1857b74c67ef47db2920"} Dec 03 09:26:40 crc kubenswrapper[4576]: I1203 09:26:40.728336 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gr72t" event={"ID":"8be1c885-f5b1-428a-892e-bf0d68ddf12e","Type":"ContainerDied","Data":"ed8568aa757733c3f7c2c4fa89cc49ccc8cba83ac630290f69fe7804fc90f197"} Dec 03 09:26:40 crc kubenswrapper[4576]: I1203 09:26:40.728436 4576 scope.go:117] "RemoveContainer" containerID="f84561742f822115e5d672cee30c50a14b28c8ae034e1857b74c67ef47db2920" Dec 03 09:26:40 crc kubenswrapper[4576]: I1203 09:26:40.730294 4576 generic.go:334] "Generic (PLEG): container finished" podID="60b1bede-26e9-4b5d-b450-9866da685693" containerID="beab5b73426a8a2ed784ee9435f3e0edb3d47b3882d27e186bb2abf808fa00f3" exitCode=0 Dec 03 09:26:40 crc kubenswrapper[4576]: I1203 09:26:40.731014 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" event={"ID":"60b1bede-26e9-4b5d-b450-9866da685693","Type":"ContainerDied","Data":"beab5b73426a8a2ed784ee9435f3e0edb3d47b3882d27e186bb2abf808fa00f3"} Dec 03 09:26:40 crc kubenswrapper[4576]: I1203 09:26:40.731039 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" event={"ID":"60b1bede-26e9-4b5d-b450-9866da685693","Type":"ContainerStarted","Data":"51c850a542b7285323ba43ef888f5d2233dbe9ae9ee471959ff54bc0da4a11c1"} Dec 03 09:26:40 crc kubenswrapper[4576]: I1203 09:26:40.761847 4576 scope.go:117] "RemoveContainer" containerID="b0e5c9392fb59df6e2bebc5183c9c70b6f9d007c52e5c8126321575bcbf77d21" Dec 03 09:26:40 crc kubenswrapper[4576]: I1203 09:26:40.788250 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gr72t"] Dec 03 09:26:40 crc kubenswrapper[4576]: I1203 09:26:40.797533 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openshift-marketplace/redhat-marketplace-gr72t"] Dec 03 09:26:40 crc kubenswrapper[4576]: I1203 09:26:40.808391 4576 scope.go:117] "RemoveContainer" containerID="6c35bc955c9a6492bc1c33a94ee8da4db2ae9332f09a74023b27fa3a69f21a7d" Dec 03 09:26:40 crc kubenswrapper[4576]: I1203 09:26:40.842386 4576 scope.go:117] "RemoveContainer" containerID="f84561742f822115e5d672cee30c50a14b28c8ae034e1857b74c67ef47db2920" Dec 03 09:26:40 crc kubenswrapper[4576]: E1203 09:26:40.843408 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f84561742f822115e5d672cee30c50a14b28c8ae034e1857b74c67ef47db2920\": container with ID starting with f84561742f822115e5d672cee30c50a14b28c8ae034e1857b74c67ef47db2920 not found: ID does not exist" containerID="f84561742f822115e5d672cee30c50a14b28c8ae034e1857b74c67ef47db2920" Dec 03 09:26:40 crc kubenswrapper[4576]: I1203 09:26:40.843453 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f84561742f822115e5d672cee30c50a14b28c8ae034e1857b74c67ef47db2920"} err="failed to get container status \"f84561742f822115e5d672cee30c50a14b28c8ae034e1857b74c67ef47db2920\": rpc error: code = NotFound desc = could not find container \"f84561742f822115e5d672cee30c50a14b28c8ae034e1857b74c67ef47db2920\": container with ID starting with f84561742f822115e5d672cee30c50a14b28c8ae034e1857b74c67ef47db2920 not found: ID does not exist" Dec 03 09:26:40 crc kubenswrapper[4576]: I1203 09:26:40.843481 4576 scope.go:117] "RemoveContainer" containerID="b0e5c9392fb59df6e2bebc5183c9c70b6f9d007c52e5c8126321575bcbf77d21" Dec 03 09:26:40 crc kubenswrapper[4576]: E1203 09:26:40.845191 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b0e5c9392fb59df6e2bebc5183c9c70b6f9d007c52e5c8126321575bcbf77d21\": container with ID starting with b0e5c9392fb59df6e2bebc5183c9c70b6f9d007c52e5c8126321575bcbf77d21 not found: ID does not exist" containerID="b0e5c9392fb59df6e2bebc5183c9c70b6f9d007c52e5c8126321575bcbf77d21" Dec 03 09:26:40 crc kubenswrapper[4576]: I1203 09:26:40.845226 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0e5c9392fb59df6e2bebc5183c9c70b6f9d007c52e5c8126321575bcbf77d21"} err="failed to get container status \"b0e5c9392fb59df6e2bebc5183c9c70b6f9d007c52e5c8126321575bcbf77d21\": rpc error: code = NotFound desc = could not find container \"b0e5c9392fb59df6e2bebc5183c9c70b6f9d007c52e5c8126321575bcbf77d21\": container with ID starting with b0e5c9392fb59df6e2bebc5183c9c70b6f9d007c52e5c8126321575bcbf77d21 not found: ID does not exist" Dec 03 09:26:40 crc kubenswrapper[4576]: I1203 09:26:40.845252 4576 scope.go:117] "RemoveContainer" containerID="6c35bc955c9a6492bc1c33a94ee8da4db2ae9332f09a74023b27fa3a69f21a7d" Dec 03 09:26:40 crc kubenswrapper[4576]: E1203 09:26:40.845828 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c35bc955c9a6492bc1c33a94ee8da4db2ae9332f09a74023b27fa3a69f21a7d\": container with ID starting with 6c35bc955c9a6492bc1c33a94ee8da4db2ae9332f09a74023b27fa3a69f21a7d not found: ID does not exist" containerID="6c35bc955c9a6492bc1c33a94ee8da4db2ae9332f09a74023b27fa3a69f21a7d" Dec 03 09:26:40 crc kubenswrapper[4576]: I1203 09:26:40.845857 4576 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"6c35bc955c9a6492bc1c33a94ee8da4db2ae9332f09a74023b27fa3a69f21a7d"} err="failed to get container status \"6c35bc955c9a6492bc1c33a94ee8da4db2ae9332f09a74023b27fa3a69f21a7d\": rpc error: code = NotFound desc = could not find container \"6c35bc955c9a6492bc1c33a94ee8da4db2ae9332f09a74023b27fa3a69f21a7d\": container with ID starting with 6c35bc955c9a6492bc1c33a94ee8da4db2ae9332f09a74023b27fa3a69f21a7d not found: ID does not exist" Dec 03 09:26:40 crc kubenswrapper[4576]: I1203 09:26:40.845876 4576 scope.go:117] "RemoveContainer" containerID="6a946ea24cd0ff2991d3760b68d68a973a0657937c42a4e51fa62809d15fc324" Dec 03 09:26:41 crc kubenswrapper[4576]: I1203 09:26:41.687303 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8be1c885-f5b1-428a-892e-bf0d68ddf12e" path="/var/lib/kubelet/pods/8be1c885-f5b1-428a-892e-bf0d68ddf12e/volumes" Dec 03 09:27:36 crc kubenswrapper[4576]: I1203 09:27:36.133338 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-dm7vc"] Dec 03 09:27:36 crc kubenswrapper[4576]: E1203 09:27:36.134572 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8be1c885-f5b1-428a-892e-bf0d68ddf12e" containerName="registry-server" Dec 03 09:27:36 crc kubenswrapper[4576]: I1203 09:27:36.134590 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="8be1c885-f5b1-428a-892e-bf0d68ddf12e" containerName="registry-server" Dec 03 09:27:36 crc kubenswrapper[4576]: E1203 09:27:36.134618 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8be1c885-f5b1-428a-892e-bf0d68ddf12e" containerName="extract-content" Dec 03 09:27:36 crc kubenswrapper[4576]: I1203 09:27:36.134628 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="8be1c885-f5b1-428a-892e-bf0d68ddf12e" containerName="extract-content" Dec 03 09:27:36 crc kubenswrapper[4576]: E1203 09:27:36.134642 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8be1c885-f5b1-428a-892e-bf0d68ddf12e" containerName="extract-utilities" Dec 03 09:27:36 crc kubenswrapper[4576]: I1203 09:27:36.134650 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="8be1c885-f5b1-428a-892e-bf0d68ddf12e" containerName="extract-utilities" Dec 03 09:27:36 crc kubenswrapper[4576]: I1203 09:27:36.134914 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="8be1c885-f5b1-428a-892e-bf0d68ddf12e" containerName="registry-server" Dec 03 09:27:36 crc kubenswrapper[4576]: I1203 09:27:36.136683 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-dm7vc" Dec 03 09:27:36 crc kubenswrapper[4576]: I1203 09:27:36.172551 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dm7vc"] Dec 03 09:27:36 crc kubenswrapper[4576]: I1203 09:27:36.258541 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2be046de-fd7e-4a89-84ad-d62b93d905bd-utilities\") pod \"certified-operators-dm7vc\" (UID: \"2be046de-fd7e-4a89-84ad-d62b93d905bd\") " pod="openshift-marketplace/certified-operators-dm7vc" Dec 03 09:27:36 crc kubenswrapper[4576]: I1203 09:27:36.258695 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxjtq\" (UniqueName: \"kubernetes.io/projected/2be046de-fd7e-4a89-84ad-d62b93d905bd-kube-api-access-zxjtq\") pod \"certified-operators-dm7vc\" (UID: \"2be046de-fd7e-4a89-84ad-d62b93d905bd\") " pod="openshift-marketplace/certified-operators-dm7vc" Dec 03 09:27:36 crc kubenswrapper[4576]: I1203 09:27:36.258824 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2be046de-fd7e-4a89-84ad-d62b93d905bd-catalog-content\") pod \"certified-operators-dm7vc\" (UID: \"2be046de-fd7e-4a89-84ad-d62b93d905bd\") " pod="openshift-marketplace/certified-operators-dm7vc" Dec 03 09:27:36 crc kubenswrapper[4576]: I1203 09:27:36.360550 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2be046de-fd7e-4a89-84ad-d62b93d905bd-utilities\") pod \"certified-operators-dm7vc\" (UID: \"2be046de-fd7e-4a89-84ad-d62b93d905bd\") " pod="openshift-marketplace/certified-operators-dm7vc" Dec 03 09:27:36 crc kubenswrapper[4576]: I1203 09:27:36.360626 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxjtq\" (UniqueName: \"kubernetes.io/projected/2be046de-fd7e-4a89-84ad-d62b93d905bd-kube-api-access-zxjtq\") pod \"certified-operators-dm7vc\" (UID: \"2be046de-fd7e-4a89-84ad-d62b93d905bd\") " pod="openshift-marketplace/certified-operators-dm7vc" Dec 03 09:27:36 crc kubenswrapper[4576]: I1203 09:27:36.360714 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2be046de-fd7e-4a89-84ad-d62b93d905bd-catalog-content\") pod \"certified-operators-dm7vc\" (UID: \"2be046de-fd7e-4a89-84ad-d62b93d905bd\") " pod="openshift-marketplace/certified-operators-dm7vc" Dec 03 09:27:36 crc kubenswrapper[4576]: I1203 09:27:36.361306 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2be046de-fd7e-4a89-84ad-d62b93d905bd-catalog-content\") pod \"certified-operators-dm7vc\" (UID: \"2be046de-fd7e-4a89-84ad-d62b93d905bd\") " pod="openshift-marketplace/certified-operators-dm7vc" Dec 03 09:27:36 crc kubenswrapper[4576]: I1203 09:27:36.361362 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2be046de-fd7e-4a89-84ad-d62b93d905bd-utilities\") pod \"certified-operators-dm7vc\" (UID: \"2be046de-fd7e-4a89-84ad-d62b93d905bd\") " pod="openshift-marketplace/certified-operators-dm7vc" Dec 03 09:27:36 crc kubenswrapper[4576]: I1203 09:27:36.380781 4576 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-zxjtq\" (UniqueName: \"kubernetes.io/projected/2be046de-fd7e-4a89-84ad-d62b93d905bd-kube-api-access-zxjtq\") pod \"certified-operators-dm7vc\" (UID: \"2be046de-fd7e-4a89-84ad-d62b93d905bd\") " pod="openshift-marketplace/certified-operators-dm7vc" Dec 03 09:27:36 crc kubenswrapper[4576]: I1203 09:27:36.470444 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dm7vc" Dec 03 09:27:36 crc kubenswrapper[4576]: I1203 09:27:36.859865 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dm7vc"] Dec 03 09:27:37 crc kubenswrapper[4576]: I1203 09:27:37.240854 4576 generic.go:334] "Generic (PLEG): container finished" podID="2be046de-fd7e-4a89-84ad-d62b93d905bd" containerID="6276a8926bc5516291ec42cfeaf37c64311db8357be12ee8be8b98ba1d47bfab" exitCode=0 Dec 03 09:27:37 crc kubenswrapper[4576]: I1203 09:27:37.240906 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dm7vc" event={"ID":"2be046de-fd7e-4a89-84ad-d62b93d905bd","Type":"ContainerDied","Data":"6276a8926bc5516291ec42cfeaf37c64311db8357be12ee8be8b98ba1d47bfab"} Dec 03 09:27:37 crc kubenswrapper[4576]: I1203 09:27:37.240933 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dm7vc" event={"ID":"2be046de-fd7e-4a89-84ad-d62b93d905bd","Type":"ContainerStarted","Data":"6225e8093cca226f692c7748ae055600490d8bb54e80cdc2f2a0304e9e8e878c"} Dec 03 09:27:38 crc kubenswrapper[4576]: I1203 09:27:38.251683 4576 generic.go:334] "Generic (PLEG): container finished" podID="be59e37f-89da-4b5c-9126-9fd6fe4d9ec8" containerID="1a8eafdca7124a5efda5cdef29683086840edbe8314cd2ab4e44b96c22debc71" exitCode=0 Dec 03 09:27:38 crc kubenswrapper[4576]: I1203 09:27:38.251751 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-q8xlq" event={"ID":"be59e37f-89da-4b5c-9126-9fd6fe4d9ec8","Type":"ContainerDied","Data":"1a8eafdca7124a5efda5cdef29683086840edbe8314cd2ab4e44b96c22debc71"} Dec 03 09:27:38 crc kubenswrapper[4576]: I1203 09:27:38.256175 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dm7vc" event={"ID":"2be046de-fd7e-4a89-84ad-d62b93d905bd","Type":"ContainerStarted","Data":"76b476870a42a6733ba31eed606fdfd3d031d49709850c1f8848ef02cb09f7eb"} Dec 03 09:27:39 crc kubenswrapper[4576]: I1203 09:27:39.272953 4576 generic.go:334] "Generic (PLEG): container finished" podID="2be046de-fd7e-4a89-84ad-d62b93d905bd" containerID="76b476870a42a6733ba31eed606fdfd3d031d49709850c1f8848ef02cb09f7eb" exitCode=0 Dec 03 09:27:39 crc kubenswrapper[4576]: I1203 09:27:39.273028 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dm7vc" event={"ID":"2be046de-fd7e-4a89-84ad-d62b93d905bd","Type":"ContainerDied","Data":"76b476870a42a6733ba31eed606fdfd3d031d49709850c1f8848ef02cb09f7eb"} Dec 03 09:27:39 crc kubenswrapper[4576]: I1203 09:27:39.870592 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-q8xlq" Dec 03 09:27:39 crc kubenswrapper[4576]: I1203 09:27:39.901546 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/be59e37f-89da-4b5c-9126-9fd6fe4d9ec8-ssh-key\") pod \"be59e37f-89da-4b5c-9126-9fd6fe4d9ec8\" (UID: \"be59e37f-89da-4b5c-9126-9fd6fe4d9ec8\") " Dec 03 09:27:39 crc kubenswrapper[4576]: I1203 09:27:39.901644 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7b74b\" (UniqueName: \"kubernetes.io/projected/be59e37f-89da-4b5c-9126-9fd6fe4d9ec8-kube-api-access-7b74b\") pod \"be59e37f-89da-4b5c-9126-9fd6fe4d9ec8\" (UID: \"be59e37f-89da-4b5c-9126-9fd6fe4d9ec8\") " Dec 03 09:27:39 crc kubenswrapper[4576]: I1203 09:27:39.901780 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/be59e37f-89da-4b5c-9126-9fd6fe4d9ec8-inventory\") pod \"be59e37f-89da-4b5c-9126-9fd6fe4d9ec8\" (UID: \"be59e37f-89da-4b5c-9126-9fd6fe4d9ec8\") " Dec 03 09:27:39 crc kubenswrapper[4576]: I1203 09:27:39.930552 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be59e37f-89da-4b5c-9126-9fd6fe4d9ec8-kube-api-access-7b74b" (OuterVolumeSpecName: "kube-api-access-7b74b") pod "be59e37f-89da-4b5c-9126-9fd6fe4d9ec8" (UID: "be59e37f-89da-4b5c-9126-9fd6fe4d9ec8"). InnerVolumeSpecName "kube-api-access-7b74b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:27:39 crc kubenswrapper[4576]: I1203 09:27:39.976071 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be59e37f-89da-4b5c-9126-9fd6fe4d9ec8-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "be59e37f-89da-4b5c-9126-9fd6fe4d9ec8" (UID: "be59e37f-89da-4b5c-9126-9fd6fe4d9ec8"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:27:39 crc kubenswrapper[4576]: I1203 09:27:39.982962 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be59e37f-89da-4b5c-9126-9fd6fe4d9ec8-inventory" (OuterVolumeSpecName: "inventory") pod "be59e37f-89da-4b5c-9126-9fd6fe4d9ec8" (UID: "be59e37f-89da-4b5c-9126-9fd6fe4d9ec8"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:27:40 crc kubenswrapper[4576]: I1203 09:27:40.017381 4576 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/be59e37f-89da-4b5c-9126-9fd6fe4d9ec8-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 09:27:40 crc kubenswrapper[4576]: I1203 09:27:40.017428 4576 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/be59e37f-89da-4b5c-9126-9fd6fe4d9ec8-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 09:27:40 crc kubenswrapper[4576]: I1203 09:27:40.017439 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7b74b\" (UniqueName: \"kubernetes.io/projected/be59e37f-89da-4b5c-9126-9fd6fe4d9ec8-kube-api-access-7b74b\") on node \"crc\" DevicePath \"\"" Dec 03 09:27:40 crc kubenswrapper[4576]: I1203 09:27:40.292955 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-q8xlq" event={"ID":"be59e37f-89da-4b5c-9126-9fd6fe4d9ec8","Type":"ContainerDied","Data":"9a4abf7e4a7681145e03847788677a403b858e37dd11d409f0f5781a53ffc5a0"} Dec 03 09:27:40 crc kubenswrapper[4576]: I1203 09:27:40.293053 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-q8xlq" Dec 03 09:27:40 crc kubenswrapper[4576]: I1203 09:27:40.293053 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9a4abf7e4a7681145e03847788677a403b858e37dd11d409f0f5781a53ffc5a0" Dec 03 09:27:40 crc kubenswrapper[4576]: I1203 09:27:40.401076 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-k8srx"] Dec 03 09:27:40 crc kubenswrapper[4576]: E1203 09:27:40.401610 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be59e37f-89da-4b5c-9126-9fd6fe4d9ec8" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 03 09:27:40 crc kubenswrapper[4576]: I1203 09:27:40.401633 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="be59e37f-89da-4b5c-9126-9fd6fe4d9ec8" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 03 09:27:40 crc kubenswrapper[4576]: I1203 09:27:40.401889 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="be59e37f-89da-4b5c-9126-9fd6fe4d9ec8" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 03 09:27:40 crc kubenswrapper[4576]: I1203 09:27:40.403336 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-k8srx" Dec 03 09:27:40 crc kubenswrapper[4576]: I1203 09:27:40.411727 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 09:27:40 crc kubenswrapper[4576]: I1203 09:27:40.412144 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 09:27:40 crc kubenswrapper[4576]: I1203 09:27:40.412355 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 09:27:40 crc kubenswrapper[4576]: I1203 09:27:40.412501 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-sl8dp" Dec 03 09:27:40 crc kubenswrapper[4576]: I1203 09:27:40.414915 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-k8srx"] Dec 03 09:27:40 crc kubenswrapper[4576]: I1203 09:27:40.551955 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xrwjz\" (UniqueName: \"kubernetes.io/projected/2c30d1eb-aa4b-44e6-b424-dcdd12b23090-kube-api-access-xrwjz\") pod \"ssh-known-hosts-edpm-deployment-k8srx\" (UID: \"2c30d1eb-aa4b-44e6-b424-dcdd12b23090\") " pod="openstack/ssh-known-hosts-edpm-deployment-k8srx" Dec 03 09:27:40 crc kubenswrapper[4576]: I1203 09:27:40.552023 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/2c30d1eb-aa4b-44e6-b424-dcdd12b23090-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-k8srx\" (UID: \"2c30d1eb-aa4b-44e6-b424-dcdd12b23090\") " pod="openstack/ssh-known-hosts-edpm-deployment-k8srx" Dec 03 09:27:40 crc kubenswrapper[4576]: I1203 09:27:40.552190 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/2c30d1eb-aa4b-44e6-b424-dcdd12b23090-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-k8srx\" (UID: \"2c30d1eb-aa4b-44e6-b424-dcdd12b23090\") " pod="openstack/ssh-known-hosts-edpm-deployment-k8srx" Dec 03 09:27:40 crc kubenswrapper[4576]: I1203 09:27:40.653698 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/2c30d1eb-aa4b-44e6-b424-dcdd12b23090-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-k8srx\" (UID: \"2c30d1eb-aa4b-44e6-b424-dcdd12b23090\") " pod="openstack/ssh-known-hosts-edpm-deployment-k8srx" Dec 03 09:27:40 crc kubenswrapper[4576]: I1203 09:27:40.653794 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xrwjz\" (UniqueName: \"kubernetes.io/projected/2c30d1eb-aa4b-44e6-b424-dcdd12b23090-kube-api-access-xrwjz\") pod \"ssh-known-hosts-edpm-deployment-k8srx\" (UID: \"2c30d1eb-aa4b-44e6-b424-dcdd12b23090\") " pod="openstack/ssh-known-hosts-edpm-deployment-k8srx" Dec 03 09:27:40 crc kubenswrapper[4576]: I1203 09:27:40.653821 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/2c30d1eb-aa4b-44e6-b424-dcdd12b23090-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-k8srx\" (UID: \"2c30d1eb-aa4b-44e6-b424-dcdd12b23090\") " pod="openstack/ssh-known-hosts-edpm-deployment-k8srx" Dec 03 09:27:40 crc 
kubenswrapper[4576]: I1203 09:27:40.659246 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/2c30d1eb-aa4b-44e6-b424-dcdd12b23090-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-k8srx\" (UID: \"2c30d1eb-aa4b-44e6-b424-dcdd12b23090\") " pod="openstack/ssh-known-hosts-edpm-deployment-k8srx" Dec 03 09:27:40 crc kubenswrapper[4576]: I1203 09:27:40.659648 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/2c30d1eb-aa4b-44e6-b424-dcdd12b23090-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-k8srx\" (UID: \"2c30d1eb-aa4b-44e6-b424-dcdd12b23090\") " pod="openstack/ssh-known-hosts-edpm-deployment-k8srx" Dec 03 09:27:40 crc kubenswrapper[4576]: I1203 09:27:40.671431 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xrwjz\" (UniqueName: \"kubernetes.io/projected/2c30d1eb-aa4b-44e6-b424-dcdd12b23090-kube-api-access-xrwjz\") pod \"ssh-known-hosts-edpm-deployment-k8srx\" (UID: \"2c30d1eb-aa4b-44e6-b424-dcdd12b23090\") " pod="openstack/ssh-known-hosts-edpm-deployment-k8srx" Dec 03 09:27:40 crc kubenswrapper[4576]: I1203 09:27:40.764576 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-k8srx" Dec 03 09:27:41 crc kubenswrapper[4576]: I1203 09:27:41.165477 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-k8srx"] Dec 03 09:27:41 crc kubenswrapper[4576]: I1203 09:27:41.306035 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-k8srx" event={"ID":"2c30d1eb-aa4b-44e6-b424-dcdd12b23090","Type":"ContainerStarted","Data":"8eba44278f96653aae2c83df4fd7acc3176d22b0e92ae027fd86e42b725df213"} Dec 03 09:27:41 crc kubenswrapper[4576]: I1203 09:27:41.312186 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dm7vc" event={"ID":"2be046de-fd7e-4a89-84ad-d62b93d905bd","Type":"ContainerStarted","Data":"666a2f0d34f3229c44047388a9512314329c1dfbbba6d7962065671ea6f20c64"} Dec 03 09:27:41 crc kubenswrapper[4576]: I1203 09:27:41.341834 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-dm7vc" podStartSLOduration=2.191947149 podStartE2EDuration="5.341814502s" podCreationTimestamp="2025-12-03 09:27:36 +0000 UTC" firstStartedPulling="2025-12-03 09:27:37.242412014 +0000 UTC m=+2864.628388998" lastFinishedPulling="2025-12-03 09:27:40.392279347 +0000 UTC m=+2867.778256351" observedRunningTime="2025-12-03 09:27:41.328911405 +0000 UTC m=+2868.714888409" watchObservedRunningTime="2025-12-03 09:27:41.341814502 +0000 UTC m=+2868.727791486" Dec 03 09:27:42 crc kubenswrapper[4576]: I1203 09:27:42.324116 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-k8srx" event={"ID":"2c30d1eb-aa4b-44e6-b424-dcdd12b23090","Type":"ContainerStarted","Data":"01fd67a576a28bd9a0639c1293243699ff532c606efb202699f587ec803cc411"} Dec 03 09:27:42 crc kubenswrapper[4576]: I1203 09:27:42.349721 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-k8srx" podStartSLOduration=2.168191575 podStartE2EDuration="2.349697778s" podCreationTimestamp="2025-12-03 09:27:40 +0000 UTC" firstStartedPulling="2025-12-03 09:27:41.17149887 +0000 UTC 
m=+2868.557475844" lastFinishedPulling="2025-12-03 09:27:41.353005063 +0000 UTC m=+2868.738982047" observedRunningTime="2025-12-03 09:27:42.338032153 +0000 UTC m=+2869.724009147" watchObservedRunningTime="2025-12-03 09:27:42.349697778 +0000 UTC m=+2869.735674762" Dec 03 09:27:46 crc kubenswrapper[4576]: I1203 09:27:46.471388 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-dm7vc" Dec 03 09:27:46 crc kubenswrapper[4576]: I1203 09:27:46.472065 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-dm7vc" Dec 03 09:27:46 crc kubenswrapper[4576]: I1203 09:27:46.520138 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-dm7vc" Dec 03 09:27:47 crc kubenswrapper[4576]: I1203 09:27:47.432573 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-dm7vc" Dec 03 09:27:47 crc kubenswrapper[4576]: I1203 09:27:47.499006 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dm7vc"] Dec 03 09:27:49 crc kubenswrapper[4576]: I1203 09:27:49.389465 4576 generic.go:334] "Generic (PLEG): container finished" podID="2c30d1eb-aa4b-44e6-b424-dcdd12b23090" containerID="01fd67a576a28bd9a0639c1293243699ff532c606efb202699f587ec803cc411" exitCode=0 Dec 03 09:27:49 crc kubenswrapper[4576]: I1203 09:27:49.389689 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-k8srx" event={"ID":"2c30d1eb-aa4b-44e6-b424-dcdd12b23090","Type":"ContainerDied","Data":"01fd67a576a28bd9a0639c1293243699ff532c606efb202699f587ec803cc411"} Dec 03 09:27:49 crc kubenswrapper[4576]: I1203 09:27:49.390848 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-dm7vc" podUID="2be046de-fd7e-4a89-84ad-d62b93d905bd" containerName="registry-server" containerID="cri-o://666a2f0d34f3229c44047388a9512314329c1dfbbba6d7962065671ea6f20c64" gracePeriod=2 Dec 03 09:27:49 crc kubenswrapper[4576]: I1203 09:27:49.940513 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-dm7vc" Dec 03 09:27:49 crc kubenswrapper[4576]: I1203 09:27:49.995959 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2be046de-fd7e-4a89-84ad-d62b93d905bd-utilities\") pod \"2be046de-fd7e-4a89-84ad-d62b93d905bd\" (UID: \"2be046de-fd7e-4a89-84ad-d62b93d905bd\") " Dec 03 09:27:49 crc kubenswrapper[4576]: I1203 09:27:49.996251 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2be046de-fd7e-4a89-84ad-d62b93d905bd-catalog-content\") pod \"2be046de-fd7e-4a89-84ad-d62b93d905bd\" (UID: \"2be046de-fd7e-4a89-84ad-d62b93d905bd\") " Dec 03 09:27:49 crc kubenswrapper[4576]: I1203 09:27:49.996381 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zxjtq\" (UniqueName: \"kubernetes.io/projected/2be046de-fd7e-4a89-84ad-d62b93d905bd-kube-api-access-zxjtq\") pod \"2be046de-fd7e-4a89-84ad-d62b93d905bd\" (UID: \"2be046de-fd7e-4a89-84ad-d62b93d905bd\") " Dec 03 09:27:49 crc kubenswrapper[4576]: I1203 09:27:49.996802 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2be046de-fd7e-4a89-84ad-d62b93d905bd-utilities" (OuterVolumeSpecName: "utilities") pod "2be046de-fd7e-4a89-84ad-d62b93d905bd" (UID: "2be046de-fd7e-4a89-84ad-d62b93d905bd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:27:50 crc kubenswrapper[4576]: I1203 09:27:50.010580 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2be046de-fd7e-4a89-84ad-d62b93d905bd-kube-api-access-zxjtq" (OuterVolumeSpecName: "kube-api-access-zxjtq") pod "2be046de-fd7e-4a89-84ad-d62b93d905bd" (UID: "2be046de-fd7e-4a89-84ad-d62b93d905bd"). InnerVolumeSpecName "kube-api-access-zxjtq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:27:50 crc kubenswrapper[4576]: I1203 09:27:50.051783 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2be046de-fd7e-4a89-84ad-d62b93d905bd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2be046de-fd7e-4a89-84ad-d62b93d905bd" (UID: "2be046de-fd7e-4a89-84ad-d62b93d905bd"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:27:50 crc kubenswrapper[4576]: I1203 09:27:50.098322 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2be046de-fd7e-4a89-84ad-d62b93d905bd-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 09:27:50 crc kubenswrapper[4576]: I1203 09:27:50.098356 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2be046de-fd7e-4a89-84ad-d62b93d905bd-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 09:27:50 crc kubenswrapper[4576]: I1203 09:27:50.098368 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zxjtq\" (UniqueName: \"kubernetes.io/projected/2be046de-fd7e-4a89-84ad-d62b93d905bd-kube-api-access-zxjtq\") on node \"crc\" DevicePath \"\"" Dec 03 09:27:50 crc kubenswrapper[4576]: I1203 09:27:50.401387 4576 generic.go:334] "Generic (PLEG): container finished" podID="2be046de-fd7e-4a89-84ad-d62b93d905bd" containerID="666a2f0d34f3229c44047388a9512314329c1dfbbba6d7962065671ea6f20c64" exitCode=0 Dec 03 09:27:50 crc kubenswrapper[4576]: I1203 09:27:50.401471 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dm7vc" Dec 03 09:27:50 crc kubenswrapper[4576]: I1203 09:27:50.401546 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dm7vc" event={"ID":"2be046de-fd7e-4a89-84ad-d62b93d905bd","Type":"ContainerDied","Data":"666a2f0d34f3229c44047388a9512314329c1dfbbba6d7962065671ea6f20c64"} Dec 03 09:27:50 crc kubenswrapper[4576]: I1203 09:27:50.401939 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dm7vc" event={"ID":"2be046de-fd7e-4a89-84ad-d62b93d905bd","Type":"ContainerDied","Data":"6225e8093cca226f692c7748ae055600490d8bb54e80cdc2f2a0304e9e8e878c"} Dec 03 09:27:50 crc kubenswrapper[4576]: I1203 09:27:50.401973 4576 scope.go:117] "RemoveContainer" containerID="666a2f0d34f3229c44047388a9512314329c1dfbbba6d7962065671ea6f20c64" Dec 03 09:27:50 crc kubenswrapper[4576]: I1203 09:27:50.443832 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dm7vc"] Dec 03 09:27:50 crc kubenswrapper[4576]: I1203 09:27:50.474099 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-dm7vc"] Dec 03 09:27:50 crc kubenswrapper[4576]: I1203 09:27:50.476499 4576 scope.go:117] "RemoveContainer" containerID="76b476870a42a6733ba31eed606fdfd3d031d49709850c1f8848ef02cb09f7eb" Dec 03 09:27:50 crc kubenswrapper[4576]: I1203 09:27:50.530750 4576 scope.go:117] "RemoveContainer" containerID="6276a8926bc5516291ec42cfeaf37c64311db8357be12ee8be8b98ba1d47bfab" Dec 03 09:27:50 crc kubenswrapper[4576]: I1203 09:27:50.555172 4576 scope.go:117] "RemoveContainer" containerID="666a2f0d34f3229c44047388a9512314329c1dfbbba6d7962065671ea6f20c64" Dec 03 09:27:50 crc kubenswrapper[4576]: E1203 09:27:50.556045 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"666a2f0d34f3229c44047388a9512314329c1dfbbba6d7962065671ea6f20c64\": container with ID starting with 666a2f0d34f3229c44047388a9512314329c1dfbbba6d7962065671ea6f20c64 not found: ID does not exist" containerID="666a2f0d34f3229c44047388a9512314329c1dfbbba6d7962065671ea6f20c64" Dec 03 09:27:50 crc kubenswrapper[4576]: I1203 09:27:50.556108 
4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"666a2f0d34f3229c44047388a9512314329c1dfbbba6d7962065671ea6f20c64"} err="failed to get container status \"666a2f0d34f3229c44047388a9512314329c1dfbbba6d7962065671ea6f20c64\": rpc error: code = NotFound desc = could not find container \"666a2f0d34f3229c44047388a9512314329c1dfbbba6d7962065671ea6f20c64\": container with ID starting with 666a2f0d34f3229c44047388a9512314329c1dfbbba6d7962065671ea6f20c64 not found: ID does not exist" Dec 03 09:27:50 crc kubenswrapper[4576]: I1203 09:27:50.556136 4576 scope.go:117] "RemoveContainer" containerID="76b476870a42a6733ba31eed606fdfd3d031d49709850c1f8848ef02cb09f7eb" Dec 03 09:27:50 crc kubenswrapper[4576]: E1203 09:27:50.556659 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"76b476870a42a6733ba31eed606fdfd3d031d49709850c1f8848ef02cb09f7eb\": container with ID starting with 76b476870a42a6733ba31eed606fdfd3d031d49709850c1f8848ef02cb09f7eb not found: ID does not exist" containerID="76b476870a42a6733ba31eed606fdfd3d031d49709850c1f8848ef02cb09f7eb" Dec 03 09:27:50 crc kubenswrapper[4576]: I1203 09:27:50.556703 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76b476870a42a6733ba31eed606fdfd3d031d49709850c1f8848ef02cb09f7eb"} err="failed to get container status \"76b476870a42a6733ba31eed606fdfd3d031d49709850c1f8848ef02cb09f7eb\": rpc error: code = NotFound desc = could not find container \"76b476870a42a6733ba31eed606fdfd3d031d49709850c1f8848ef02cb09f7eb\": container with ID starting with 76b476870a42a6733ba31eed606fdfd3d031d49709850c1f8848ef02cb09f7eb not found: ID does not exist" Dec 03 09:27:50 crc kubenswrapper[4576]: I1203 09:27:50.556730 4576 scope.go:117] "RemoveContainer" containerID="6276a8926bc5516291ec42cfeaf37c64311db8357be12ee8be8b98ba1d47bfab" Dec 03 09:27:50 crc kubenswrapper[4576]: E1203 09:27:50.557045 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6276a8926bc5516291ec42cfeaf37c64311db8357be12ee8be8b98ba1d47bfab\": container with ID starting with 6276a8926bc5516291ec42cfeaf37c64311db8357be12ee8be8b98ba1d47bfab not found: ID does not exist" containerID="6276a8926bc5516291ec42cfeaf37c64311db8357be12ee8be8b98ba1d47bfab" Dec 03 09:27:50 crc kubenswrapper[4576]: I1203 09:27:50.557081 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6276a8926bc5516291ec42cfeaf37c64311db8357be12ee8be8b98ba1d47bfab"} err="failed to get container status \"6276a8926bc5516291ec42cfeaf37c64311db8357be12ee8be8b98ba1d47bfab\": rpc error: code = NotFound desc = could not find container \"6276a8926bc5516291ec42cfeaf37c64311db8357be12ee8be8b98ba1d47bfab\": container with ID starting with 6276a8926bc5516291ec42cfeaf37c64311db8357be12ee8be8b98ba1d47bfab not found: ID does not exist" Dec 03 09:27:50 crc kubenswrapper[4576]: I1203 09:27:50.833655 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-k8srx" Dec 03 09:27:50 crc kubenswrapper[4576]: I1203 09:27:50.919282 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/2c30d1eb-aa4b-44e6-b424-dcdd12b23090-ssh-key-openstack-edpm-ipam\") pod \"2c30d1eb-aa4b-44e6-b424-dcdd12b23090\" (UID: \"2c30d1eb-aa4b-44e6-b424-dcdd12b23090\") " Dec 03 09:27:50 crc kubenswrapper[4576]: I1203 09:27:50.920573 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xrwjz\" (UniqueName: \"kubernetes.io/projected/2c30d1eb-aa4b-44e6-b424-dcdd12b23090-kube-api-access-xrwjz\") pod \"2c30d1eb-aa4b-44e6-b424-dcdd12b23090\" (UID: \"2c30d1eb-aa4b-44e6-b424-dcdd12b23090\") " Dec 03 09:27:50 crc kubenswrapper[4576]: I1203 09:27:50.920730 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/2c30d1eb-aa4b-44e6-b424-dcdd12b23090-inventory-0\") pod \"2c30d1eb-aa4b-44e6-b424-dcdd12b23090\" (UID: \"2c30d1eb-aa4b-44e6-b424-dcdd12b23090\") " Dec 03 09:27:50 crc kubenswrapper[4576]: I1203 09:27:50.927154 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c30d1eb-aa4b-44e6-b424-dcdd12b23090-kube-api-access-xrwjz" (OuterVolumeSpecName: "kube-api-access-xrwjz") pod "2c30d1eb-aa4b-44e6-b424-dcdd12b23090" (UID: "2c30d1eb-aa4b-44e6-b424-dcdd12b23090"). InnerVolumeSpecName "kube-api-access-xrwjz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:27:50 crc kubenswrapper[4576]: I1203 09:27:50.950351 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c30d1eb-aa4b-44e6-b424-dcdd12b23090-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "2c30d1eb-aa4b-44e6-b424-dcdd12b23090" (UID: "2c30d1eb-aa4b-44e6-b424-dcdd12b23090"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:27:50 crc kubenswrapper[4576]: I1203 09:27:50.958056 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c30d1eb-aa4b-44e6-b424-dcdd12b23090-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "2c30d1eb-aa4b-44e6-b424-dcdd12b23090" (UID: "2c30d1eb-aa4b-44e6-b424-dcdd12b23090"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:27:51 crc kubenswrapper[4576]: I1203 09:27:51.023679 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xrwjz\" (UniqueName: \"kubernetes.io/projected/2c30d1eb-aa4b-44e6-b424-dcdd12b23090-kube-api-access-xrwjz\") on node \"crc\" DevicePath \"\"" Dec 03 09:27:51 crc kubenswrapper[4576]: I1203 09:27:51.023720 4576 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/2c30d1eb-aa4b-44e6-b424-dcdd12b23090-inventory-0\") on node \"crc\" DevicePath \"\"" Dec 03 09:27:51 crc kubenswrapper[4576]: I1203 09:27:51.023735 4576 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/2c30d1eb-aa4b-44e6-b424-dcdd12b23090-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 03 09:27:51 crc kubenswrapper[4576]: I1203 09:27:51.413736 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-k8srx" event={"ID":"2c30d1eb-aa4b-44e6-b424-dcdd12b23090","Type":"ContainerDied","Data":"8eba44278f96653aae2c83df4fd7acc3176d22b0e92ae027fd86e42b725df213"} Dec 03 09:27:51 crc kubenswrapper[4576]: I1203 09:27:51.413791 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8eba44278f96653aae2c83df4fd7acc3176d22b0e92ae027fd86e42b725df213" Dec 03 09:27:51 crc kubenswrapper[4576]: I1203 09:27:51.415299 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-k8srx" Dec 03 09:27:51 crc kubenswrapper[4576]: I1203 09:27:51.543455 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-h2l89"] Dec 03 09:27:51 crc kubenswrapper[4576]: E1203 09:27:51.552214 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c30d1eb-aa4b-44e6-b424-dcdd12b23090" containerName="ssh-known-hosts-edpm-deployment" Dec 03 09:27:51 crc kubenswrapper[4576]: I1203 09:27:51.552249 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c30d1eb-aa4b-44e6-b424-dcdd12b23090" containerName="ssh-known-hosts-edpm-deployment" Dec 03 09:27:51 crc kubenswrapper[4576]: E1203 09:27:51.552264 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2be046de-fd7e-4a89-84ad-d62b93d905bd" containerName="extract-content" Dec 03 09:27:51 crc kubenswrapper[4576]: I1203 09:27:51.552273 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="2be046de-fd7e-4a89-84ad-d62b93d905bd" containerName="extract-content" Dec 03 09:27:51 crc kubenswrapper[4576]: E1203 09:27:51.552284 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2be046de-fd7e-4a89-84ad-d62b93d905bd" containerName="registry-server" Dec 03 09:27:51 crc kubenswrapper[4576]: I1203 09:27:51.552294 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="2be046de-fd7e-4a89-84ad-d62b93d905bd" containerName="registry-server" Dec 03 09:27:51 crc kubenswrapper[4576]: E1203 09:27:51.552316 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2be046de-fd7e-4a89-84ad-d62b93d905bd" containerName="extract-utilities" Dec 03 09:27:51 crc kubenswrapper[4576]: I1203 09:27:51.552325 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="2be046de-fd7e-4a89-84ad-d62b93d905bd" containerName="extract-utilities" Dec 03 09:27:51 crc kubenswrapper[4576]: I1203 09:27:51.552580 4576 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="2c30d1eb-aa4b-44e6-b424-dcdd12b23090" containerName="ssh-known-hosts-edpm-deployment" Dec 03 09:27:51 crc kubenswrapper[4576]: I1203 09:27:51.552599 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="2be046de-fd7e-4a89-84ad-d62b93d905bd" containerName="registry-server" Dec 03 09:27:51 crc kubenswrapper[4576]: I1203 09:27:51.553393 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-h2l89" Dec 03 09:27:51 crc kubenswrapper[4576]: I1203 09:27:51.558142 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-sl8dp" Dec 03 09:27:51 crc kubenswrapper[4576]: I1203 09:27:51.558350 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 09:27:51 crc kubenswrapper[4576]: I1203 09:27:51.558489 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 09:27:51 crc kubenswrapper[4576]: I1203 09:27:51.558632 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 09:27:51 crc kubenswrapper[4576]: I1203 09:27:51.565415 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-h2l89"] Dec 03 09:27:51 crc kubenswrapper[4576]: I1203 09:27:51.635771 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9afda75e-55d1-4823-a4be-3c79bf36b3b2-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-h2l89\" (UID: \"9afda75e-55d1-4823-a4be-3c79bf36b3b2\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-h2l89" Dec 03 09:27:51 crc kubenswrapper[4576]: I1203 09:27:51.636111 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9afda75e-55d1-4823-a4be-3c79bf36b3b2-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-h2l89\" (UID: \"9afda75e-55d1-4823-a4be-3c79bf36b3b2\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-h2l89" Dec 03 09:27:51 crc kubenswrapper[4576]: I1203 09:27:51.636302 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l4sb6\" (UniqueName: \"kubernetes.io/projected/9afda75e-55d1-4823-a4be-3c79bf36b3b2-kube-api-access-l4sb6\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-h2l89\" (UID: \"9afda75e-55d1-4823-a4be-3c79bf36b3b2\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-h2l89" Dec 03 09:27:51 crc kubenswrapper[4576]: I1203 09:27:51.686978 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2be046de-fd7e-4a89-84ad-d62b93d905bd" path="/var/lib/kubelet/pods/2be046de-fd7e-4a89-84ad-d62b93d905bd/volumes" Dec 03 09:27:51 crc kubenswrapper[4576]: I1203 09:27:51.738156 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9afda75e-55d1-4823-a4be-3c79bf36b3b2-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-h2l89\" (UID: \"9afda75e-55d1-4823-a4be-3c79bf36b3b2\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-h2l89" Dec 03 09:27:51 crc kubenswrapper[4576]: I1203 09:27:51.738329 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-l4sb6\" (UniqueName: \"kubernetes.io/projected/9afda75e-55d1-4823-a4be-3c79bf36b3b2-kube-api-access-l4sb6\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-h2l89\" (UID: \"9afda75e-55d1-4823-a4be-3c79bf36b3b2\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-h2l89" Dec 03 09:27:51 crc kubenswrapper[4576]: I1203 09:27:51.738365 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9afda75e-55d1-4823-a4be-3c79bf36b3b2-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-h2l89\" (UID: \"9afda75e-55d1-4823-a4be-3c79bf36b3b2\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-h2l89" Dec 03 09:27:51 crc kubenswrapper[4576]: I1203 09:27:51.747557 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9afda75e-55d1-4823-a4be-3c79bf36b3b2-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-h2l89\" (UID: \"9afda75e-55d1-4823-a4be-3c79bf36b3b2\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-h2l89" Dec 03 09:27:51 crc kubenswrapper[4576]: I1203 09:27:51.756069 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9afda75e-55d1-4823-a4be-3c79bf36b3b2-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-h2l89\" (UID: \"9afda75e-55d1-4823-a4be-3c79bf36b3b2\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-h2l89" Dec 03 09:27:51 crc kubenswrapper[4576]: I1203 09:27:51.756450 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l4sb6\" (UniqueName: \"kubernetes.io/projected/9afda75e-55d1-4823-a4be-3c79bf36b3b2-kube-api-access-l4sb6\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-h2l89\" (UID: \"9afda75e-55d1-4823-a4be-3c79bf36b3b2\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-h2l89" Dec 03 09:27:51 crc kubenswrapper[4576]: I1203 09:27:51.878576 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-h2l89" Dec 03 09:27:52 crc kubenswrapper[4576]: I1203 09:27:52.454356 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-h2l89"] Dec 03 09:27:53 crc kubenswrapper[4576]: I1203 09:27:53.437299 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-h2l89" event={"ID":"9afda75e-55d1-4823-a4be-3c79bf36b3b2","Type":"ContainerStarted","Data":"149a5169ca812d214971806317d89cb5713276c826f891bf6f695ed891f2b63e"} Dec 03 09:27:53 crc kubenswrapper[4576]: I1203 09:27:53.437662 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-h2l89" event={"ID":"9afda75e-55d1-4823-a4be-3c79bf36b3b2","Type":"ContainerStarted","Data":"7516a61de8076e7172a02c5f9f74ff23074ba135b043314f1776dbf3c637ba66"} Dec 03 09:27:53 crc kubenswrapper[4576]: I1203 09:27:53.463773 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-h2l89" podStartSLOduration=2.291209908 podStartE2EDuration="2.463738061s" podCreationTimestamp="2025-12-03 09:27:51 +0000 UTC" firstStartedPulling="2025-12-03 09:27:52.46530232 +0000 UTC m=+2879.851279304" lastFinishedPulling="2025-12-03 09:27:52.637830473 +0000 UTC m=+2880.023807457" observedRunningTime="2025-12-03 09:27:53.458060089 +0000 UTC m=+2880.844037093" watchObservedRunningTime="2025-12-03 09:27:53.463738061 +0000 UTC m=+2880.849715055" Dec 03 09:28:01 crc kubenswrapper[4576]: I1203 09:28:01.535903 4576 generic.go:334] "Generic (PLEG): container finished" podID="9afda75e-55d1-4823-a4be-3c79bf36b3b2" containerID="149a5169ca812d214971806317d89cb5713276c826f891bf6f695ed891f2b63e" exitCode=0 Dec 03 09:28:01 crc kubenswrapper[4576]: I1203 09:28:01.536000 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-h2l89" event={"ID":"9afda75e-55d1-4823-a4be-3c79bf36b3b2","Type":"ContainerDied","Data":"149a5169ca812d214971806317d89cb5713276c826f891bf6f695ed891f2b63e"} Dec 03 09:28:03 crc kubenswrapper[4576]: I1203 09:28:03.034884 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-h2l89" Dec 03 09:28:03 crc kubenswrapper[4576]: I1203 09:28:03.155987 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9afda75e-55d1-4823-a4be-3c79bf36b3b2-ssh-key\") pod \"9afda75e-55d1-4823-a4be-3c79bf36b3b2\" (UID: \"9afda75e-55d1-4823-a4be-3c79bf36b3b2\") " Dec 03 09:28:03 crc kubenswrapper[4576]: I1203 09:28:03.156263 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l4sb6\" (UniqueName: \"kubernetes.io/projected/9afda75e-55d1-4823-a4be-3c79bf36b3b2-kube-api-access-l4sb6\") pod \"9afda75e-55d1-4823-a4be-3c79bf36b3b2\" (UID: \"9afda75e-55d1-4823-a4be-3c79bf36b3b2\") " Dec 03 09:28:03 crc kubenswrapper[4576]: I1203 09:28:03.156588 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9afda75e-55d1-4823-a4be-3c79bf36b3b2-inventory\") pod \"9afda75e-55d1-4823-a4be-3c79bf36b3b2\" (UID: \"9afda75e-55d1-4823-a4be-3c79bf36b3b2\") " Dec 03 09:28:03 crc kubenswrapper[4576]: I1203 09:28:03.161807 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9afda75e-55d1-4823-a4be-3c79bf36b3b2-kube-api-access-l4sb6" (OuterVolumeSpecName: "kube-api-access-l4sb6") pod "9afda75e-55d1-4823-a4be-3c79bf36b3b2" (UID: "9afda75e-55d1-4823-a4be-3c79bf36b3b2"). InnerVolumeSpecName "kube-api-access-l4sb6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:28:03 crc kubenswrapper[4576]: I1203 09:28:03.187665 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9afda75e-55d1-4823-a4be-3c79bf36b3b2-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "9afda75e-55d1-4823-a4be-3c79bf36b3b2" (UID: "9afda75e-55d1-4823-a4be-3c79bf36b3b2"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:28:03 crc kubenswrapper[4576]: I1203 09:28:03.193334 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9afda75e-55d1-4823-a4be-3c79bf36b3b2-inventory" (OuterVolumeSpecName: "inventory") pod "9afda75e-55d1-4823-a4be-3c79bf36b3b2" (UID: "9afda75e-55d1-4823-a4be-3c79bf36b3b2"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:28:03 crc kubenswrapper[4576]: I1203 09:28:03.258870 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l4sb6\" (UniqueName: \"kubernetes.io/projected/9afda75e-55d1-4823-a4be-3c79bf36b3b2-kube-api-access-l4sb6\") on node \"crc\" DevicePath \"\"" Dec 03 09:28:03 crc kubenswrapper[4576]: I1203 09:28:03.258910 4576 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9afda75e-55d1-4823-a4be-3c79bf36b3b2-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 09:28:03 crc kubenswrapper[4576]: I1203 09:28:03.258924 4576 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9afda75e-55d1-4823-a4be-3c79bf36b3b2-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 09:28:03 crc kubenswrapper[4576]: I1203 09:28:03.556171 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-h2l89" event={"ID":"9afda75e-55d1-4823-a4be-3c79bf36b3b2","Type":"ContainerDied","Data":"7516a61de8076e7172a02c5f9f74ff23074ba135b043314f1776dbf3c637ba66"} Dec 03 09:28:03 crc kubenswrapper[4576]: I1203 09:28:03.556218 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7516a61de8076e7172a02c5f9f74ff23074ba135b043314f1776dbf3c637ba66" Dec 03 09:28:03 crc kubenswrapper[4576]: I1203 09:28:03.556221 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-h2l89" Dec 03 09:28:03 crc kubenswrapper[4576]: I1203 09:28:03.647218 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-2c4mf"] Dec 03 09:28:03 crc kubenswrapper[4576]: E1203 09:28:03.647855 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9afda75e-55d1-4823-a4be-3c79bf36b3b2" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 03 09:28:03 crc kubenswrapper[4576]: I1203 09:28:03.647885 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="9afda75e-55d1-4823-a4be-3c79bf36b3b2" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 03 09:28:03 crc kubenswrapper[4576]: I1203 09:28:03.648282 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="9afda75e-55d1-4823-a4be-3c79bf36b3b2" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 03 09:28:03 crc kubenswrapper[4576]: I1203 09:28:03.649373 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-2c4mf" Dec 03 09:28:03 crc kubenswrapper[4576]: I1203 09:28:03.652490 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 09:28:03 crc kubenswrapper[4576]: I1203 09:28:03.652757 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-sl8dp" Dec 03 09:28:03 crc kubenswrapper[4576]: I1203 09:28:03.652905 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 09:28:03 crc kubenswrapper[4576]: I1203 09:28:03.653088 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 09:28:03 crc kubenswrapper[4576]: I1203 09:28:03.672114 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-2c4mf"] Dec 03 09:28:03 crc kubenswrapper[4576]: I1203 09:28:03.769709 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/96dfc3f2-bb7c-407d-8714-98a679e6d78e-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-2c4mf\" (UID: \"96dfc3f2-bb7c-407d-8714-98a679e6d78e\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-2c4mf" Dec 03 09:28:03 crc kubenswrapper[4576]: I1203 09:28:03.769923 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xdqh4\" (UniqueName: \"kubernetes.io/projected/96dfc3f2-bb7c-407d-8714-98a679e6d78e-kube-api-access-xdqh4\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-2c4mf\" (UID: \"96dfc3f2-bb7c-407d-8714-98a679e6d78e\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-2c4mf" Dec 03 09:28:03 crc kubenswrapper[4576]: I1203 09:28:03.770183 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/96dfc3f2-bb7c-407d-8714-98a679e6d78e-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-2c4mf\" (UID: \"96dfc3f2-bb7c-407d-8714-98a679e6d78e\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-2c4mf" Dec 03 09:28:03 crc kubenswrapper[4576]: I1203 09:28:03.872606 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/96dfc3f2-bb7c-407d-8714-98a679e6d78e-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-2c4mf\" (UID: \"96dfc3f2-bb7c-407d-8714-98a679e6d78e\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-2c4mf" Dec 03 09:28:03 crc kubenswrapper[4576]: I1203 09:28:03.872754 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/96dfc3f2-bb7c-407d-8714-98a679e6d78e-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-2c4mf\" (UID: \"96dfc3f2-bb7c-407d-8714-98a679e6d78e\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-2c4mf" Dec 03 09:28:03 crc kubenswrapper[4576]: I1203 09:28:03.872895 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xdqh4\" (UniqueName: \"kubernetes.io/projected/96dfc3f2-bb7c-407d-8714-98a679e6d78e-kube-api-access-xdqh4\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-2c4mf\" (UID: 
\"96dfc3f2-bb7c-407d-8714-98a679e6d78e\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-2c4mf" Dec 03 09:28:03 crc kubenswrapper[4576]: I1203 09:28:03.879165 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/96dfc3f2-bb7c-407d-8714-98a679e6d78e-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-2c4mf\" (UID: \"96dfc3f2-bb7c-407d-8714-98a679e6d78e\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-2c4mf" Dec 03 09:28:03 crc kubenswrapper[4576]: I1203 09:28:03.879405 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/96dfc3f2-bb7c-407d-8714-98a679e6d78e-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-2c4mf\" (UID: \"96dfc3f2-bb7c-407d-8714-98a679e6d78e\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-2c4mf" Dec 03 09:28:03 crc kubenswrapper[4576]: I1203 09:28:03.888957 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xdqh4\" (UniqueName: \"kubernetes.io/projected/96dfc3f2-bb7c-407d-8714-98a679e6d78e-kube-api-access-xdqh4\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-2c4mf\" (UID: \"96dfc3f2-bb7c-407d-8714-98a679e6d78e\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-2c4mf" Dec 03 09:28:03 crc kubenswrapper[4576]: I1203 09:28:03.971578 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-2c4mf" Dec 03 09:28:04 crc kubenswrapper[4576]: I1203 09:28:04.526907 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-2c4mf"] Dec 03 09:28:04 crc kubenswrapper[4576]: I1203 09:28:04.565948 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-2c4mf" event={"ID":"96dfc3f2-bb7c-407d-8714-98a679e6d78e","Type":"ContainerStarted","Data":"0286a1d7ee325a5f075a6f8fba140aaf2b0180c44f525e98c60479ab511d7ef3"} Dec 03 09:28:05 crc kubenswrapper[4576]: I1203 09:28:05.587638 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-2c4mf" event={"ID":"96dfc3f2-bb7c-407d-8714-98a679e6d78e","Type":"ContainerStarted","Data":"3fac2783e470f29b2bc1980d4732092ba0614c49440893fdc4080803472a1161"} Dec 03 09:28:05 crc kubenswrapper[4576]: I1203 09:28:05.620936 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-2c4mf" podStartSLOduration=2.419095207 podStartE2EDuration="2.620910037s" podCreationTimestamp="2025-12-03 09:28:03 +0000 UTC" firstStartedPulling="2025-12-03 09:28:04.530246384 +0000 UTC m=+2891.916223368" lastFinishedPulling="2025-12-03 09:28:04.732061194 +0000 UTC m=+2892.118038198" observedRunningTime="2025-12-03 09:28:05.611224396 +0000 UTC m=+2892.997201390" watchObservedRunningTime="2025-12-03 09:28:05.620910037 +0000 UTC m=+2893.006887031" Dec 03 09:28:15 crc kubenswrapper[4576]: I1203 09:28:15.704407 4576 generic.go:334] "Generic (PLEG): container finished" podID="96dfc3f2-bb7c-407d-8714-98a679e6d78e" containerID="3fac2783e470f29b2bc1980d4732092ba0614c49440893fdc4080803472a1161" exitCode=0 Dec 03 09:28:15 crc kubenswrapper[4576]: I1203 09:28:15.705273 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-2c4mf" 
event={"ID":"96dfc3f2-bb7c-407d-8714-98a679e6d78e","Type":"ContainerDied","Data":"3fac2783e470f29b2bc1980d4732092ba0614c49440893fdc4080803472a1161"} Dec 03 09:28:17 crc kubenswrapper[4576]: I1203 09:28:17.167269 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-2c4mf" Dec 03 09:28:17 crc kubenswrapper[4576]: I1203 09:28:17.295268 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xdqh4\" (UniqueName: \"kubernetes.io/projected/96dfc3f2-bb7c-407d-8714-98a679e6d78e-kube-api-access-xdqh4\") pod \"96dfc3f2-bb7c-407d-8714-98a679e6d78e\" (UID: \"96dfc3f2-bb7c-407d-8714-98a679e6d78e\") " Dec 03 09:28:17 crc kubenswrapper[4576]: I1203 09:28:17.295371 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/96dfc3f2-bb7c-407d-8714-98a679e6d78e-inventory\") pod \"96dfc3f2-bb7c-407d-8714-98a679e6d78e\" (UID: \"96dfc3f2-bb7c-407d-8714-98a679e6d78e\") " Dec 03 09:28:17 crc kubenswrapper[4576]: I1203 09:28:17.295397 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/96dfc3f2-bb7c-407d-8714-98a679e6d78e-ssh-key\") pod \"96dfc3f2-bb7c-407d-8714-98a679e6d78e\" (UID: \"96dfc3f2-bb7c-407d-8714-98a679e6d78e\") " Dec 03 09:28:17 crc kubenswrapper[4576]: I1203 09:28:17.321049 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96dfc3f2-bb7c-407d-8714-98a679e6d78e-kube-api-access-xdqh4" (OuterVolumeSpecName: "kube-api-access-xdqh4") pod "96dfc3f2-bb7c-407d-8714-98a679e6d78e" (UID: "96dfc3f2-bb7c-407d-8714-98a679e6d78e"). InnerVolumeSpecName "kube-api-access-xdqh4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:28:17 crc kubenswrapper[4576]: I1203 09:28:17.328977 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96dfc3f2-bb7c-407d-8714-98a679e6d78e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "96dfc3f2-bb7c-407d-8714-98a679e6d78e" (UID: "96dfc3f2-bb7c-407d-8714-98a679e6d78e"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:28:17 crc kubenswrapper[4576]: I1203 09:28:17.330380 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96dfc3f2-bb7c-407d-8714-98a679e6d78e-inventory" (OuterVolumeSpecName: "inventory") pod "96dfc3f2-bb7c-407d-8714-98a679e6d78e" (UID: "96dfc3f2-bb7c-407d-8714-98a679e6d78e"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:28:17 crc kubenswrapper[4576]: I1203 09:28:17.397578 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xdqh4\" (UniqueName: \"kubernetes.io/projected/96dfc3f2-bb7c-407d-8714-98a679e6d78e-kube-api-access-xdqh4\") on node \"crc\" DevicePath \"\"" Dec 03 09:28:17 crc kubenswrapper[4576]: I1203 09:28:17.397614 4576 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/96dfc3f2-bb7c-407d-8714-98a679e6d78e-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 09:28:17 crc kubenswrapper[4576]: I1203 09:28:17.397626 4576 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/96dfc3f2-bb7c-407d-8714-98a679e6d78e-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 09:28:17 crc kubenswrapper[4576]: I1203 09:28:17.729210 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-2c4mf" event={"ID":"96dfc3f2-bb7c-407d-8714-98a679e6d78e","Type":"ContainerDied","Data":"0286a1d7ee325a5f075a6f8fba140aaf2b0180c44f525e98c60479ab511d7ef3"} Dec 03 09:28:17 crc kubenswrapper[4576]: I1203 09:28:17.729618 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0286a1d7ee325a5f075a6f8fba140aaf2b0180c44f525e98c60479ab511d7ef3" Dec 03 09:28:17 crc kubenswrapper[4576]: I1203 09:28:17.729290 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-2c4mf" Dec 03 09:28:17 crc kubenswrapper[4576]: I1203 09:28:17.878607 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249"] Dec 03 09:28:17 crc kubenswrapper[4576]: E1203 09:28:17.879056 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96dfc3f2-bb7c-407d-8714-98a679e6d78e" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 03 09:28:17 crc kubenswrapper[4576]: I1203 09:28:17.879078 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="96dfc3f2-bb7c-407d-8714-98a679e6d78e" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 03 09:28:17 crc kubenswrapper[4576]: I1203 09:28:17.879349 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="96dfc3f2-bb7c-407d-8714-98a679e6d78e" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 03 09:28:17 crc kubenswrapper[4576]: I1203 09:28:17.880200 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:17 crc kubenswrapper[4576]: I1203 09:28:17.884497 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 09:28:17 crc kubenswrapper[4576]: I1203 09:28:17.885797 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Dec 03 09:28:17 crc kubenswrapper[4576]: I1203 09:28:17.886862 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 09:28:17 crc kubenswrapper[4576]: I1203 09:28:17.887149 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Dec 03 09:28:17 crc kubenswrapper[4576]: I1203 09:28:17.887312 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Dec 03 09:28:17 crc kubenswrapper[4576]: I1203 09:28:17.887494 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Dec 03 09:28:17 crc kubenswrapper[4576]: I1203 09:28:17.888020 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 09:28:17 crc kubenswrapper[4576]: I1203 09:28:17.888271 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-sl8dp" Dec 03 09:28:17 crc kubenswrapper[4576]: I1203 09:28:17.919297 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249"] Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.009164 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/55ad3ecf-5525-4292-b4e9-98456a2dc903-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j6249\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.009232 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j6249\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.009271 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j6249\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.009299 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j6249\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.009441 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j6249\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.009549 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j6249\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.009630 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j6249\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.009689 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/55ad3ecf-5525-4292-b4e9-98456a2dc903-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j6249\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.009764 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pk5pc\" (UniqueName: \"kubernetes.io/projected/55ad3ecf-5525-4292-b4e9-98456a2dc903-kube-api-access-pk5pc\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j6249\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.009951 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j6249\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.010123 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/55ad3ecf-5525-4292-b4e9-98456a2dc903-openstack-edpm-ipam-ovn-default-certs-0\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-j6249\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.010234 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j6249\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.010301 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/55ad3ecf-5525-4292-b4e9-98456a2dc903-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j6249\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.010347 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j6249\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.112060 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j6249\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.112139 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/55ad3ecf-5525-4292-b4e9-98456a2dc903-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j6249\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.112181 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j6249\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.112209 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/55ad3ecf-5525-4292-b4e9-98456a2dc903-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j6249\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.112228 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j6249\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.112256 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/55ad3ecf-5525-4292-b4e9-98456a2dc903-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j6249\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.112285 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j6249\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.112306 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j6249\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.112322 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j6249\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.112348 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j6249\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.112381 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j6249\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.112406 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j6249\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.112596 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/55ad3ecf-5525-4292-b4e9-98456a2dc903-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j6249\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.112632 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pk5pc\" (UniqueName: \"kubernetes.io/projected/55ad3ecf-5525-4292-b4e9-98456a2dc903-kube-api-access-pk5pc\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j6249\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.121640 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j6249\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.122427 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j6249\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.122563 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j6249\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.122911 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j6249\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.125229 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j6249\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.127364 4576 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/55ad3ecf-5525-4292-b4e9-98456a2dc903-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j6249\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.128444 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j6249\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.128993 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/55ad3ecf-5525-4292-b4e9-98456a2dc903-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j6249\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.133474 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/55ad3ecf-5525-4292-b4e9-98456a2dc903-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j6249\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.135842 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/55ad3ecf-5525-4292-b4e9-98456a2dc903-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j6249\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.136717 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j6249\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.137835 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j6249\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.143271 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-telemetry-combined-ca-bundle\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-j6249\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.143633 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pk5pc\" (UniqueName: \"kubernetes.io/projected/55ad3ecf-5525-4292-b4e9-98456a2dc903-kube-api-access-pk5pc\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j6249\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.204035 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:28:18 crc kubenswrapper[4576]: I1203 09:28:18.799418 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249"] Dec 03 09:28:19 crc kubenswrapper[4576]: I1203 09:28:19.752040 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" event={"ID":"55ad3ecf-5525-4292-b4e9-98456a2dc903","Type":"ContainerStarted","Data":"4ac6d9b97ef594cf42204d379f49609ed9829460d90cecd54826167e015506c3"} Dec 03 09:28:19 crc kubenswrapper[4576]: I1203 09:28:19.752676 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" event={"ID":"55ad3ecf-5525-4292-b4e9-98456a2dc903","Type":"ContainerStarted","Data":"187324087abef2e4391be12fb18b9629e701bd2bee80bdb8cc312fd0020a556b"} Dec 03 09:28:19 crc kubenswrapper[4576]: I1203 09:28:19.776960 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" podStartSLOduration=2.572580024 podStartE2EDuration="2.776943961s" podCreationTimestamp="2025-12-03 09:28:17 +0000 UTC" firstStartedPulling="2025-12-03 09:28:18.817639523 +0000 UTC m=+2906.203616507" lastFinishedPulling="2025-12-03 09:28:19.02200346 +0000 UTC m=+2906.407980444" observedRunningTime="2025-12-03 09:28:19.775907413 +0000 UTC m=+2907.161884467" watchObservedRunningTime="2025-12-03 09:28:19.776943961 +0000 UTC m=+2907.162920935" Dec 03 09:29:00 crc kubenswrapper[4576]: I1203 09:29:00.133668 4576 generic.go:334] "Generic (PLEG): container finished" podID="55ad3ecf-5525-4292-b4e9-98456a2dc903" containerID="4ac6d9b97ef594cf42204d379f49609ed9829460d90cecd54826167e015506c3" exitCode=0 Dec 03 09:29:00 crc kubenswrapper[4576]: I1203 09:29:00.133761 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" event={"ID":"55ad3ecf-5525-4292-b4e9-98456a2dc903","Type":"ContainerDied","Data":"4ac6d9b97ef594cf42204d379f49609ed9829460d90cecd54826167e015506c3"} Dec 03 09:29:01 crc kubenswrapper[4576]: I1203 09:29:01.581837 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:29:01 crc kubenswrapper[4576]: I1203 09:29:01.715285 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/55ad3ecf-5525-4292-b4e9-98456a2dc903-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"55ad3ecf-5525-4292-b4e9-98456a2dc903\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " Dec 03 09:29:01 crc kubenswrapper[4576]: I1203 09:29:01.715714 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-bootstrap-combined-ca-bundle\") pod \"55ad3ecf-5525-4292-b4e9-98456a2dc903\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " Dec 03 09:29:01 crc kubenswrapper[4576]: I1203 09:29:01.715766 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/55ad3ecf-5525-4292-b4e9-98456a2dc903-openstack-edpm-ipam-ovn-default-certs-0\") pod \"55ad3ecf-5525-4292-b4e9-98456a2dc903\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " Dec 03 09:29:01 crc kubenswrapper[4576]: I1203 09:29:01.716000 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-nova-combined-ca-bundle\") pod \"55ad3ecf-5525-4292-b4e9-98456a2dc903\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " Dec 03 09:29:01 crc kubenswrapper[4576]: I1203 09:29:01.716081 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-ovn-combined-ca-bundle\") pod \"55ad3ecf-5525-4292-b4e9-98456a2dc903\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " Dec 03 09:29:01 crc kubenswrapper[4576]: I1203 09:29:01.716174 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-neutron-metadata-combined-ca-bundle\") pod \"55ad3ecf-5525-4292-b4e9-98456a2dc903\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " Dec 03 09:29:01 crc kubenswrapper[4576]: I1203 09:29:01.716215 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-telemetry-combined-ca-bundle\") pod \"55ad3ecf-5525-4292-b4e9-98456a2dc903\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " Dec 03 09:29:01 crc kubenswrapper[4576]: I1203 09:29:01.716247 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-inventory\") pod \"55ad3ecf-5525-4292-b4e9-98456a2dc903\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " Dec 03 09:29:01 crc kubenswrapper[4576]: I1203 09:29:01.716277 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/55ad3ecf-5525-4292-b4e9-98456a2dc903-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"55ad3ecf-5525-4292-b4e9-98456a2dc903\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") 
" Dec 03 09:29:01 crc kubenswrapper[4576]: I1203 09:29:01.716327 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/55ad3ecf-5525-4292-b4e9-98456a2dc903-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"55ad3ecf-5525-4292-b4e9-98456a2dc903\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " Dec 03 09:29:01 crc kubenswrapper[4576]: I1203 09:29:01.716377 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-repo-setup-combined-ca-bundle\") pod \"55ad3ecf-5525-4292-b4e9-98456a2dc903\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " Dec 03 09:29:01 crc kubenswrapper[4576]: I1203 09:29:01.716413 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-ssh-key\") pod \"55ad3ecf-5525-4292-b4e9-98456a2dc903\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " Dec 03 09:29:01 crc kubenswrapper[4576]: I1203 09:29:01.716466 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-libvirt-combined-ca-bundle\") pod \"55ad3ecf-5525-4292-b4e9-98456a2dc903\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " Dec 03 09:29:01 crc kubenswrapper[4576]: I1203 09:29:01.716491 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pk5pc\" (UniqueName: \"kubernetes.io/projected/55ad3ecf-5525-4292-b4e9-98456a2dc903-kube-api-access-pk5pc\") pod \"55ad3ecf-5525-4292-b4e9-98456a2dc903\" (UID: \"55ad3ecf-5525-4292-b4e9-98456a2dc903\") " Dec 03 09:29:01 crc kubenswrapper[4576]: I1203 09:29:01.723342 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "55ad3ecf-5525-4292-b4e9-98456a2dc903" (UID: "55ad3ecf-5525-4292-b4e9-98456a2dc903"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:29:01 crc kubenswrapper[4576]: I1203 09:29:01.724573 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "55ad3ecf-5525-4292-b4e9-98456a2dc903" (UID: "55ad3ecf-5525-4292-b4e9-98456a2dc903"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:29:01 crc kubenswrapper[4576]: I1203 09:29:01.725433 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55ad3ecf-5525-4292-b4e9-98456a2dc903-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "55ad3ecf-5525-4292-b4e9-98456a2dc903" (UID: "55ad3ecf-5525-4292-b4e9-98456a2dc903"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:29:01 crc kubenswrapper[4576]: I1203 09:29:01.728152 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55ad3ecf-5525-4292-b4e9-98456a2dc903-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "55ad3ecf-5525-4292-b4e9-98456a2dc903" (UID: "55ad3ecf-5525-4292-b4e9-98456a2dc903"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:29:01 crc kubenswrapper[4576]: I1203 09:29:01.729256 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55ad3ecf-5525-4292-b4e9-98456a2dc903-kube-api-access-pk5pc" (OuterVolumeSpecName: "kube-api-access-pk5pc") pod "55ad3ecf-5525-4292-b4e9-98456a2dc903" (UID: "55ad3ecf-5525-4292-b4e9-98456a2dc903"). InnerVolumeSpecName "kube-api-access-pk5pc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:29:01 crc kubenswrapper[4576]: I1203 09:29:01.729405 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55ad3ecf-5525-4292-b4e9-98456a2dc903-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "55ad3ecf-5525-4292-b4e9-98456a2dc903" (UID: "55ad3ecf-5525-4292-b4e9-98456a2dc903"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:29:01 crc kubenswrapper[4576]: I1203 09:29:01.730291 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55ad3ecf-5525-4292-b4e9-98456a2dc903-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "55ad3ecf-5525-4292-b4e9-98456a2dc903" (UID: "55ad3ecf-5525-4292-b4e9-98456a2dc903"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:29:01 crc kubenswrapper[4576]: I1203 09:29:01.730434 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "55ad3ecf-5525-4292-b4e9-98456a2dc903" (UID: "55ad3ecf-5525-4292-b4e9-98456a2dc903"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:29:01 crc kubenswrapper[4576]: I1203 09:29:01.732881 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "55ad3ecf-5525-4292-b4e9-98456a2dc903" (UID: "55ad3ecf-5525-4292-b4e9-98456a2dc903"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:29:01 crc kubenswrapper[4576]: I1203 09:29:01.734132 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "55ad3ecf-5525-4292-b4e9-98456a2dc903" (UID: "55ad3ecf-5525-4292-b4e9-98456a2dc903"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:29:01 crc kubenswrapper[4576]: I1203 09:29:01.735109 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "55ad3ecf-5525-4292-b4e9-98456a2dc903" (UID: "55ad3ecf-5525-4292-b4e9-98456a2dc903"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:29:01 crc kubenswrapper[4576]: I1203 09:29:01.737687 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "55ad3ecf-5525-4292-b4e9-98456a2dc903" (UID: "55ad3ecf-5525-4292-b4e9-98456a2dc903"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:29:01 crc kubenswrapper[4576]: I1203 09:29:01.754539 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "55ad3ecf-5525-4292-b4e9-98456a2dc903" (UID: "55ad3ecf-5525-4292-b4e9-98456a2dc903"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:29:01 crc kubenswrapper[4576]: I1203 09:29:01.773746 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-inventory" (OuterVolumeSpecName: "inventory") pod "55ad3ecf-5525-4292-b4e9-98456a2dc903" (UID: "55ad3ecf-5525-4292-b4e9-98456a2dc903"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:29:01 crc kubenswrapper[4576]: I1203 09:29:01.819191 4576 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:29:01 crc kubenswrapper[4576]: I1203 09:29:01.819247 4576 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:29:01 crc kubenswrapper[4576]: I1203 09:29:01.819272 4576 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:29:01 crc kubenswrapper[4576]: I1203 09:29:01.819292 4576 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:29:01 crc kubenswrapper[4576]: I1203 09:29:01.819310 4576 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 09:29:01 crc kubenswrapper[4576]: I1203 09:29:01.819328 4576 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/55ad3ecf-5525-4292-b4e9-98456a2dc903-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 03 09:29:01 crc kubenswrapper[4576]: I1203 09:29:01.819348 4576 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/55ad3ecf-5525-4292-b4e9-98456a2dc903-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 03 09:29:01 crc kubenswrapper[4576]: I1203 09:29:01.819367 4576 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:29:01 crc kubenswrapper[4576]: I1203 09:29:01.819384 4576 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 09:29:01 crc kubenswrapper[4576]: I1203 09:29:01.819401 4576 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:29:01 crc kubenswrapper[4576]: I1203 09:29:01.819418 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pk5pc\" (UniqueName: \"kubernetes.io/projected/55ad3ecf-5525-4292-b4e9-98456a2dc903-kube-api-access-pk5pc\") on node \"crc\" DevicePath \"\"" Dec 03 09:29:01 crc kubenswrapper[4576]: I1203 09:29:01.819438 4576 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: 
\"kubernetes.io/projected/55ad3ecf-5525-4292-b4e9-98456a2dc903-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 03 09:29:01 crc kubenswrapper[4576]: I1203 09:29:01.819455 4576 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55ad3ecf-5525-4292-b4e9-98456a2dc903-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:29:01 crc kubenswrapper[4576]: I1203 09:29:01.819473 4576 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/55ad3ecf-5525-4292-b4e9-98456a2dc903-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 03 09:29:02 crc kubenswrapper[4576]: I1203 09:29:02.155324 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" event={"ID":"55ad3ecf-5525-4292-b4e9-98456a2dc903","Type":"ContainerDied","Data":"187324087abef2e4391be12fb18b9629e701bd2bee80bdb8cc312fd0020a556b"} Dec 03 09:29:02 crc kubenswrapper[4576]: I1203 09:29:02.155360 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="187324087abef2e4391be12fb18b9629e701bd2bee80bdb8cc312fd0020a556b" Dec 03 09:29:02 crc kubenswrapper[4576]: I1203 09:29:02.155423 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j6249" Dec 03 09:29:02 crc kubenswrapper[4576]: I1203 09:29:02.296604 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-6n2r7"] Dec 03 09:29:02 crc kubenswrapper[4576]: E1203 09:29:02.297100 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55ad3ecf-5525-4292-b4e9-98456a2dc903" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 03 09:29:02 crc kubenswrapper[4576]: I1203 09:29:02.297125 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="55ad3ecf-5525-4292-b4e9-98456a2dc903" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 03 09:29:02 crc kubenswrapper[4576]: I1203 09:29:02.297353 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="55ad3ecf-5525-4292-b4e9-98456a2dc903" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 03 09:29:02 crc kubenswrapper[4576]: I1203 09:29:02.298217 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6n2r7" Dec 03 09:29:02 crc kubenswrapper[4576]: I1203 09:29:02.302739 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-sl8dp" Dec 03 09:29:02 crc kubenswrapper[4576]: I1203 09:29:02.302756 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 09:29:02 crc kubenswrapper[4576]: I1203 09:29:02.302873 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 09:29:02 crc kubenswrapper[4576]: I1203 09:29:02.302944 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 09:29:02 crc kubenswrapper[4576]: I1203 09:29:02.304097 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Dec 03 09:29:02 crc kubenswrapper[4576]: I1203 09:29:02.308935 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-6n2r7"] Dec 03 09:29:02 crc kubenswrapper[4576]: I1203 09:29:02.350962 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d7aa650b-ed26-494b-bc5f-95320ad9be67-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6n2r7\" (UID: \"d7aa650b-ed26-494b-bc5f-95320ad9be67\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6n2r7" Dec 03 09:29:02 crc kubenswrapper[4576]: I1203 09:29:02.351088 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d7aa650b-ed26-494b-bc5f-95320ad9be67-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6n2r7\" (UID: \"d7aa650b-ed26-494b-bc5f-95320ad9be67\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6n2r7" Dec 03 09:29:02 crc kubenswrapper[4576]: I1203 09:29:02.351151 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/d7aa650b-ed26-494b-bc5f-95320ad9be67-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6n2r7\" (UID: \"d7aa650b-ed26-494b-bc5f-95320ad9be67\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6n2r7" Dec 03 09:29:02 crc kubenswrapper[4576]: I1203 09:29:02.351182 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gsjzd\" (UniqueName: \"kubernetes.io/projected/d7aa650b-ed26-494b-bc5f-95320ad9be67-kube-api-access-gsjzd\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6n2r7\" (UID: \"d7aa650b-ed26-494b-bc5f-95320ad9be67\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6n2r7" Dec 03 09:29:02 crc kubenswrapper[4576]: I1203 09:29:02.353979 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7aa650b-ed26-494b-bc5f-95320ad9be67-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6n2r7\" (UID: \"d7aa650b-ed26-494b-bc5f-95320ad9be67\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6n2r7" Dec 03 09:29:02 crc kubenswrapper[4576]: I1203 09:29:02.456206 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/d7aa650b-ed26-494b-bc5f-95320ad9be67-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6n2r7\" (UID: \"d7aa650b-ed26-494b-bc5f-95320ad9be67\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6n2r7" Dec 03 09:29:02 crc kubenswrapper[4576]: I1203 09:29:02.456311 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/d7aa650b-ed26-494b-bc5f-95320ad9be67-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6n2r7\" (UID: \"d7aa650b-ed26-494b-bc5f-95320ad9be67\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6n2r7" Dec 03 09:29:02 crc kubenswrapper[4576]: I1203 09:29:02.456339 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gsjzd\" (UniqueName: \"kubernetes.io/projected/d7aa650b-ed26-494b-bc5f-95320ad9be67-kube-api-access-gsjzd\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6n2r7\" (UID: \"d7aa650b-ed26-494b-bc5f-95320ad9be67\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6n2r7" Dec 03 09:29:02 crc kubenswrapper[4576]: I1203 09:29:02.456473 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7aa650b-ed26-494b-bc5f-95320ad9be67-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6n2r7\" (UID: \"d7aa650b-ed26-494b-bc5f-95320ad9be67\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6n2r7" Dec 03 09:29:02 crc kubenswrapper[4576]: I1203 09:29:02.456597 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d7aa650b-ed26-494b-bc5f-95320ad9be67-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6n2r7\" (UID: \"d7aa650b-ed26-494b-bc5f-95320ad9be67\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6n2r7" Dec 03 09:29:02 crc kubenswrapper[4576]: I1203 09:29:02.457693 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/d7aa650b-ed26-494b-bc5f-95320ad9be67-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6n2r7\" (UID: \"d7aa650b-ed26-494b-bc5f-95320ad9be67\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6n2r7" Dec 03 09:29:02 crc kubenswrapper[4576]: I1203 09:29:02.461504 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d7aa650b-ed26-494b-bc5f-95320ad9be67-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6n2r7\" (UID: \"d7aa650b-ed26-494b-bc5f-95320ad9be67\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6n2r7" Dec 03 09:29:02 crc kubenswrapper[4576]: I1203 09:29:02.462322 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d7aa650b-ed26-494b-bc5f-95320ad9be67-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6n2r7\" (UID: \"d7aa650b-ed26-494b-bc5f-95320ad9be67\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6n2r7" Dec 03 09:29:02 crc kubenswrapper[4576]: I1203 09:29:02.462785 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7aa650b-ed26-494b-bc5f-95320ad9be67-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6n2r7\" (UID: \"d7aa650b-ed26-494b-bc5f-95320ad9be67\") " 
pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6n2r7" Dec 03 09:29:02 crc kubenswrapper[4576]: I1203 09:29:02.476646 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gsjzd\" (UniqueName: \"kubernetes.io/projected/d7aa650b-ed26-494b-bc5f-95320ad9be67-kube-api-access-gsjzd\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6n2r7\" (UID: \"d7aa650b-ed26-494b-bc5f-95320ad9be67\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6n2r7" Dec 03 09:29:02 crc kubenswrapper[4576]: I1203 09:29:02.653331 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6n2r7" Dec 03 09:29:03 crc kubenswrapper[4576]: I1203 09:29:03.247484 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-6n2r7"] Dec 03 09:29:04 crc kubenswrapper[4576]: I1203 09:29:04.173133 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6n2r7" event={"ID":"d7aa650b-ed26-494b-bc5f-95320ad9be67","Type":"ContainerStarted","Data":"cf697ea2ea7e9f2aa5d188624d1cd1d30735e1ab45e22a8d648f01b4abff917f"} Dec 03 09:29:04 crc kubenswrapper[4576]: I1203 09:29:04.173505 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6n2r7" event={"ID":"d7aa650b-ed26-494b-bc5f-95320ad9be67","Type":"ContainerStarted","Data":"03fbfc5fb6021b3770019770ab26a881960ac887d587fcec80844dd5664aeb2a"} Dec 03 09:29:04 crc kubenswrapper[4576]: I1203 09:29:04.196882 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6n2r7" podStartSLOduration=2.000156217 podStartE2EDuration="2.19685934s" podCreationTimestamp="2025-12-03 09:29:02 +0000 UTC" firstStartedPulling="2025-12-03 09:29:03.264880276 +0000 UTC m=+2950.650857260" lastFinishedPulling="2025-12-03 09:29:03.461583399 +0000 UTC m=+2950.847560383" observedRunningTime="2025-12-03 09:29:04.18941712 +0000 UTC m=+2951.575394114" watchObservedRunningTime="2025-12-03 09:29:04.19685934 +0000 UTC m=+2951.582836334" Dec 03 09:29:09 crc kubenswrapper[4576]: I1203 09:29:09.680502 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 09:29:09 crc kubenswrapper[4576]: I1203 09:29:09.681088 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 09:29:39 crc kubenswrapper[4576]: I1203 09:29:39.681118 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 09:29:39 crc kubenswrapper[4576]: I1203 09:29:39.681802 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" 
probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 09:30:00 crc kubenswrapper[4576]: I1203 09:30:00.157222 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412570-b6499"] Dec 03 09:30:00 crc kubenswrapper[4576]: I1203 09:30:00.159198 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412570-b6499" Dec 03 09:30:00 crc kubenswrapper[4576]: I1203 09:30:00.161754 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 03 09:30:00 crc kubenswrapper[4576]: I1203 09:30:00.161764 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 03 09:30:00 crc kubenswrapper[4576]: I1203 09:30:00.172018 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412570-b6499"] Dec 03 09:30:00 crc kubenswrapper[4576]: I1203 09:30:00.184699 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gjvbl\" (UniqueName: \"kubernetes.io/projected/c374ea3b-7842-47d4-b6ca-2462ce18ce46-kube-api-access-gjvbl\") pod \"collect-profiles-29412570-b6499\" (UID: \"c374ea3b-7842-47d4-b6ca-2462ce18ce46\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412570-b6499" Dec 03 09:30:00 crc kubenswrapper[4576]: I1203 09:30:00.184801 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c374ea3b-7842-47d4-b6ca-2462ce18ce46-config-volume\") pod \"collect-profiles-29412570-b6499\" (UID: \"c374ea3b-7842-47d4-b6ca-2462ce18ce46\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412570-b6499" Dec 03 09:30:00 crc kubenswrapper[4576]: I1203 09:30:00.184857 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c374ea3b-7842-47d4-b6ca-2462ce18ce46-secret-volume\") pod \"collect-profiles-29412570-b6499\" (UID: \"c374ea3b-7842-47d4-b6ca-2462ce18ce46\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412570-b6499" Dec 03 09:30:00 crc kubenswrapper[4576]: I1203 09:30:00.286158 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gjvbl\" (UniqueName: \"kubernetes.io/projected/c374ea3b-7842-47d4-b6ca-2462ce18ce46-kube-api-access-gjvbl\") pod \"collect-profiles-29412570-b6499\" (UID: \"c374ea3b-7842-47d4-b6ca-2462ce18ce46\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412570-b6499" Dec 03 09:30:00 crc kubenswrapper[4576]: I1203 09:30:00.286288 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c374ea3b-7842-47d4-b6ca-2462ce18ce46-config-volume\") pod \"collect-profiles-29412570-b6499\" (UID: \"c374ea3b-7842-47d4-b6ca-2462ce18ce46\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412570-b6499" Dec 03 09:30:00 crc kubenswrapper[4576]: I1203 09:30:00.286366 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: 
\"kubernetes.io/secret/c374ea3b-7842-47d4-b6ca-2462ce18ce46-secret-volume\") pod \"collect-profiles-29412570-b6499\" (UID: \"c374ea3b-7842-47d4-b6ca-2462ce18ce46\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412570-b6499" Dec 03 09:30:00 crc kubenswrapper[4576]: I1203 09:30:00.289019 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c374ea3b-7842-47d4-b6ca-2462ce18ce46-config-volume\") pod \"collect-profiles-29412570-b6499\" (UID: \"c374ea3b-7842-47d4-b6ca-2462ce18ce46\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412570-b6499" Dec 03 09:30:00 crc kubenswrapper[4576]: I1203 09:30:00.310024 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c374ea3b-7842-47d4-b6ca-2462ce18ce46-secret-volume\") pod \"collect-profiles-29412570-b6499\" (UID: \"c374ea3b-7842-47d4-b6ca-2462ce18ce46\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412570-b6499" Dec 03 09:30:00 crc kubenswrapper[4576]: I1203 09:30:00.324498 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gjvbl\" (UniqueName: \"kubernetes.io/projected/c374ea3b-7842-47d4-b6ca-2462ce18ce46-kube-api-access-gjvbl\") pod \"collect-profiles-29412570-b6499\" (UID: \"c374ea3b-7842-47d4-b6ca-2462ce18ce46\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412570-b6499" Dec 03 09:30:00 crc kubenswrapper[4576]: I1203 09:30:00.486775 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412570-b6499" Dec 03 09:30:00 crc kubenswrapper[4576]: I1203 09:30:00.988139 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412570-b6499"] Dec 03 09:30:01 crc kubenswrapper[4576]: I1203 09:30:01.735007 4576 generic.go:334] "Generic (PLEG): container finished" podID="c374ea3b-7842-47d4-b6ca-2462ce18ce46" containerID="75df304071699a1d95ca8bb581e66e6f0d5c9e02d587bcae2a02be47e7c40d61" exitCode=0 Dec 03 09:30:01 crc kubenswrapper[4576]: I1203 09:30:01.736423 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412570-b6499" event={"ID":"c374ea3b-7842-47d4-b6ca-2462ce18ce46","Type":"ContainerDied","Data":"75df304071699a1d95ca8bb581e66e6f0d5c9e02d587bcae2a02be47e7c40d61"} Dec 03 09:30:01 crc kubenswrapper[4576]: I1203 09:30:01.739817 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412570-b6499" event={"ID":"c374ea3b-7842-47d4-b6ca-2462ce18ce46","Type":"ContainerStarted","Data":"abb9240418400143c9994cccc8d76e873aa91ddfd1c280e93d66d3bfbcebf0d1"} Dec 03 09:30:03 crc kubenswrapper[4576]: I1203 09:30:03.083973 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412570-b6499" Dec 03 09:30:03 crc kubenswrapper[4576]: I1203 09:30:03.151761 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gjvbl\" (UniqueName: \"kubernetes.io/projected/c374ea3b-7842-47d4-b6ca-2462ce18ce46-kube-api-access-gjvbl\") pod \"c374ea3b-7842-47d4-b6ca-2462ce18ce46\" (UID: \"c374ea3b-7842-47d4-b6ca-2462ce18ce46\") " Dec 03 09:30:03 crc kubenswrapper[4576]: I1203 09:30:03.152206 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c374ea3b-7842-47d4-b6ca-2462ce18ce46-secret-volume\") pod \"c374ea3b-7842-47d4-b6ca-2462ce18ce46\" (UID: \"c374ea3b-7842-47d4-b6ca-2462ce18ce46\") " Dec 03 09:30:03 crc kubenswrapper[4576]: I1203 09:30:03.152245 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c374ea3b-7842-47d4-b6ca-2462ce18ce46-config-volume\") pod \"c374ea3b-7842-47d4-b6ca-2462ce18ce46\" (UID: \"c374ea3b-7842-47d4-b6ca-2462ce18ce46\") " Dec 03 09:30:03 crc kubenswrapper[4576]: I1203 09:30:03.153171 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c374ea3b-7842-47d4-b6ca-2462ce18ce46-config-volume" (OuterVolumeSpecName: "config-volume") pod "c374ea3b-7842-47d4-b6ca-2462ce18ce46" (UID: "c374ea3b-7842-47d4-b6ca-2462ce18ce46"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:30:03 crc kubenswrapper[4576]: I1203 09:30:03.159906 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c374ea3b-7842-47d4-b6ca-2462ce18ce46-kube-api-access-gjvbl" (OuterVolumeSpecName: "kube-api-access-gjvbl") pod "c374ea3b-7842-47d4-b6ca-2462ce18ce46" (UID: "c374ea3b-7842-47d4-b6ca-2462ce18ce46"). InnerVolumeSpecName "kube-api-access-gjvbl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:30:03 crc kubenswrapper[4576]: I1203 09:30:03.162909 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c374ea3b-7842-47d4-b6ca-2462ce18ce46-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "c374ea3b-7842-47d4-b6ca-2462ce18ce46" (UID: "c374ea3b-7842-47d4-b6ca-2462ce18ce46"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:30:03 crc kubenswrapper[4576]: I1203 09:30:03.254576 4576 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c374ea3b-7842-47d4-b6ca-2462ce18ce46-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 09:30:03 crc kubenswrapper[4576]: I1203 09:30:03.254617 4576 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c374ea3b-7842-47d4-b6ca-2462ce18ce46-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 09:30:03 crc kubenswrapper[4576]: I1203 09:30:03.254628 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gjvbl\" (UniqueName: \"kubernetes.io/projected/c374ea3b-7842-47d4-b6ca-2462ce18ce46-kube-api-access-gjvbl\") on node \"crc\" DevicePath \"\"" Dec 03 09:30:03 crc kubenswrapper[4576]: I1203 09:30:03.759054 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412570-b6499" event={"ID":"c374ea3b-7842-47d4-b6ca-2462ce18ce46","Type":"ContainerDied","Data":"abb9240418400143c9994cccc8d76e873aa91ddfd1c280e93d66d3bfbcebf0d1"} Dec 03 09:30:03 crc kubenswrapper[4576]: I1203 09:30:03.759096 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="abb9240418400143c9994cccc8d76e873aa91ddfd1c280e93d66d3bfbcebf0d1" Dec 03 09:30:03 crc kubenswrapper[4576]: I1203 09:30:03.759115 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412570-b6499" Dec 03 09:30:04 crc kubenswrapper[4576]: I1203 09:30:04.176348 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412525-8cqsz"] Dec 03 09:30:04 crc kubenswrapper[4576]: I1203 09:30:04.185707 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412525-8cqsz"] Dec 03 09:30:05 crc kubenswrapper[4576]: I1203 09:30:05.687876 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8455c8a5-3cf3-44b7-98dd-9d6564fb6994" path="/var/lib/kubelet/pods/8455c8a5-3cf3-44b7-98dd-9d6564fb6994/volumes" Dec 03 09:30:09 crc kubenswrapper[4576]: I1203 09:30:09.680940 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 09:30:09 crc kubenswrapper[4576]: I1203 09:30:09.681488 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 09:30:09 crc kubenswrapper[4576]: I1203 09:30:09.694899 4576 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" Dec 03 09:30:09 crc kubenswrapper[4576]: I1203 09:30:09.697066 4576 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"51c850a542b7285323ba43ef888f5d2233dbe9ae9ee471959ff54bc0da4a11c1"} 
pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 09:30:09 crc kubenswrapper[4576]: I1203 09:30:09.697333 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" containerID="cri-o://51c850a542b7285323ba43ef888f5d2233dbe9ae9ee471959ff54bc0da4a11c1" gracePeriod=600 Dec 03 09:30:09 crc kubenswrapper[4576]: E1203 09:30:09.828908 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:30:10 crc kubenswrapper[4576]: I1203 09:30:10.827283 4576 generic.go:334] "Generic (PLEG): container finished" podID="60b1bede-26e9-4b5d-b450-9866da685693" containerID="51c850a542b7285323ba43ef888f5d2233dbe9ae9ee471959ff54bc0da4a11c1" exitCode=0 Dec 03 09:30:10 crc kubenswrapper[4576]: I1203 09:30:10.827393 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" event={"ID":"60b1bede-26e9-4b5d-b450-9866da685693","Type":"ContainerDied","Data":"51c850a542b7285323ba43ef888f5d2233dbe9ae9ee471959ff54bc0da4a11c1"} Dec 03 09:30:10 crc kubenswrapper[4576]: I1203 09:30:10.827856 4576 scope.go:117] "RemoveContainer" containerID="beab5b73426a8a2ed784ee9435f3e0edb3d47b3882d27e186bb2abf808fa00f3" Dec 03 09:30:10 crc kubenswrapper[4576]: I1203 09:30:10.829048 4576 scope.go:117] "RemoveContainer" containerID="51c850a542b7285323ba43ef888f5d2233dbe9ae9ee471959ff54bc0da4a11c1" Dec 03 09:30:10 crc kubenswrapper[4576]: E1203 09:30:10.829984 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:30:14 crc kubenswrapper[4576]: I1203 09:30:14.867165 4576 generic.go:334] "Generic (PLEG): container finished" podID="d7aa650b-ed26-494b-bc5f-95320ad9be67" containerID="cf697ea2ea7e9f2aa5d188624d1cd1d30735e1ab45e22a8d648f01b4abff917f" exitCode=0 Dec 03 09:30:14 crc kubenswrapper[4576]: I1203 09:30:14.867234 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6n2r7" event={"ID":"d7aa650b-ed26-494b-bc5f-95320ad9be67","Type":"ContainerDied","Data":"cf697ea2ea7e9f2aa5d188624d1cd1d30735e1ab45e22a8d648f01b4abff917f"} Dec 03 09:30:16 crc kubenswrapper[4576]: I1203 09:30:16.354526 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6n2r7" Dec 03 09:30:16 crc kubenswrapper[4576]: I1203 09:30:16.411936 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/d7aa650b-ed26-494b-bc5f-95320ad9be67-ovncontroller-config-0\") pod \"d7aa650b-ed26-494b-bc5f-95320ad9be67\" (UID: \"d7aa650b-ed26-494b-bc5f-95320ad9be67\") " Dec 03 09:30:16 crc kubenswrapper[4576]: I1203 09:30:16.412007 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d7aa650b-ed26-494b-bc5f-95320ad9be67-inventory\") pod \"d7aa650b-ed26-494b-bc5f-95320ad9be67\" (UID: \"d7aa650b-ed26-494b-bc5f-95320ad9be67\") " Dec 03 09:30:16 crc kubenswrapper[4576]: I1203 09:30:16.412026 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7aa650b-ed26-494b-bc5f-95320ad9be67-ovn-combined-ca-bundle\") pod \"d7aa650b-ed26-494b-bc5f-95320ad9be67\" (UID: \"d7aa650b-ed26-494b-bc5f-95320ad9be67\") " Dec 03 09:30:16 crc kubenswrapper[4576]: I1203 09:30:16.412179 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d7aa650b-ed26-494b-bc5f-95320ad9be67-ssh-key\") pod \"d7aa650b-ed26-494b-bc5f-95320ad9be67\" (UID: \"d7aa650b-ed26-494b-bc5f-95320ad9be67\") " Dec 03 09:30:16 crc kubenswrapper[4576]: I1203 09:30:16.412233 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gsjzd\" (UniqueName: \"kubernetes.io/projected/d7aa650b-ed26-494b-bc5f-95320ad9be67-kube-api-access-gsjzd\") pod \"d7aa650b-ed26-494b-bc5f-95320ad9be67\" (UID: \"d7aa650b-ed26-494b-bc5f-95320ad9be67\") " Dec 03 09:30:16 crc kubenswrapper[4576]: I1203 09:30:16.421573 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7aa650b-ed26-494b-bc5f-95320ad9be67-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "d7aa650b-ed26-494b-bc5f-95320ad9be67" (UID: "d7aa650b-ed26-494b-bc5f-95320ad9be67"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:30:16 crc kubenswrapper[4576]: I1203 09:30:16.421766 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7aa650b-ed26-494b-bc5f-95320ad9be67-kube-api-access-gsjzd" (OuterVolumeSpecName: "kube-api-access-gsjzd") pod "d7aa650b-ed26-494b-bc5f-95320ad9be67" (UID: "d7aa650b-ed26-494b-bc5f-95320ad9be67"). InnerVolumeSpecName "kube-api-access-gsjzd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:30:16 crc kubenswrapper[4576]: I1203 09:30:16.439501 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d7aa650b-ed26-494b-bc5f-95320ad9be67-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "d7aa650b-ed26-494b-bc5f-95320ad9be67" (UID: "d7aa650b-ed26-494b-bc5f-95320ad9be67"). InnerVolumeSpecName "ovncontroller-config-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:30:16 crc kubenswrapper[4576]: I1203 09:30:16.445825 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7aa650b-ed26-494b-bc5f-95320ad9be67-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d7aa650b-ed26-494b-bc5f-95320ad9be67" (UID: "d7aa650b-ed26-494b-bc5f-95320ad9be67"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:30:16 crc kubenswrapper[4576]: I1203 09:30:16.450030 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7aa650b-ed26-494b-bc5f-95320ad9be67-inventory" (OuterVolumeSpecName: "inventory") pod "d7aa650b-ed26-494b-bc5f-95320ad9be67" (UID: "d7aa650b-ed26-494b-bc5f-95320ad9be67"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:30:16 crc kubenswrapper[4576]: I1203 09:30:16.514570 4576 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/d7aa650b-ed26-494b-bc5f-95320ad9be67-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Dec 03 09:30:16 crc kubenswrapper[4576]: I1203 09:30:16.514605 4576 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d7aa650b-ed26-494b-bc5f-95320ad9be67-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 09:30:16 crc kubenswrapper[4576]: I1203 09:30:16.514618 4576 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7aa650b-ed26-494b-bc5f-95320ad9be67-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:30:16 crc kubenswrapper[4576]: I1203 09:30:16.514630 4576 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d7aa650b-ed26-494b-bc5f-95320ad9be67-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 09:30:16 crc kubenswrapper[4576]: I1203 09:30:16.514641 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gsjzd\" (UniqueName: \"kubernetes.io/projected/d7aa650b-ed26-494b-bc5f-95320ad9be67-kube-api-access-gsjzd\") on node \"crc\" DevicePath \"\"" Dec 03 09:30:16 crc kubenswrapper[4576]: I1203 09:30:16.895563 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6n2r7" event={"ID":"d7aa650b-ed26-494b-bc5f-95320ad9be67","Type":"ContainerDied","Data":"03fbfc5fb6021b3770019770ab26a881960ac887d587fcec80844dd5664aeb2a"} Dec 03 09:30:16 crc kubenswrapper[4576]: I1203 09:30:16.895623 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="03fbfc5fb6021b3770019770ab26a881960ac887d587fcec80844dd5664aeb2a" Dec 03 09:30:16 crc kubenswrapper[4576]: I1203 09:30:16.895634 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6n2r7" Dec 03 09:30:17 crc kubenswrapper[4576]: I1203 09:30:17.070907 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5"] Dec 03 09:30:17 crc kubenswrapper[4576]: E1203 09:30:17.071715 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c374ea3b-7842-47d4-b6ca-2462ce18ce46" containerName="collect-profiles" Dec 03 09:30:17 crc kubenswrapper[4576]: I1203 09:30:17.071738 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="c374ea3b-7842-47d4-b6ca-2462ce18ce46" containerName="collect-profiles" Dec 03 09:30:17 crc kubenswrapper[4576]: E1203 09:30:17.071774 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7aa650b-ed26-494b-bc5f-95320ad9be67" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 03 09:30:17 crc kubenswrapper[4576]: I1203 09:30:17.071782 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7aa650b-ed26-494b-bc5f-95320ad9be67" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 03 09:30:17 crc kubenswrapper[4576]: I1203 09:30:17.071983 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7aa650b-ed26-494b-bc5f-95320ad9be67" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 03 09:30:17 crc kubenswrapper[4576]: I1203 09:30:17.072016 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="c374ea3b-7842-47d4-b6ca-2462ce18ce46" containerName="collect-profiles" Dec 03 09:30:17 crc kubenswrapper[4576]: I1203 09:30:17.072767 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5" Dec 03 09:30:17 crc kubenswrapper[4576]: I1203 09:30:17.076406 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 09:30:17 crc kubenswrapper[4576]: I1203 09:30:17.076597 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-sl8dp" Dec 03 09:30:17 crc kubenswrapper[4576]: I1203 09:30:17.077957 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 09:30:17 crc kubenswrapper[4576]: I1203 09:30:17.079110 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Dec 03 09:30:17 crc kubenswrapper[4576]: I1203 09:30:17.079168 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Dec 03 09:30:17 crc kubenswrapper[4576]: I1203 09:30:17.079110 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 09:30:17 crc kubenswrapper[4576]: I1203 09:30:17.090809 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5"] Dec 03 09:30:17 crc kubenswrapper[4576]: I1203 09:30:17.126946 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/94db1892-a7dd-4a07-b181-fa2fbcffe2fc-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5\" (UID: \"94db1892-a7dd-4a07-b181-fa2fbcffe2fc\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5" Dec 03 09:30:17 crc kubenswrapper[4576]: I1203 
09:30:17.127037 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/94db1892-a7dd-4a07-b181-fa2fbcffe2fc-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5\" (UID: \"94db1892-a7dd-4a07-b181-fa2fbcffe2fc\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5" Dec 03 09:30:17 crc kubenswrapper[4576]: I1203 09:30:17.127070 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94db1892-a7dd-4a07-b181-fa2fbcffe2fc-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5\" (UID: \"94db1892-a7dd-4a07-b181-fa2fbcffe2fc\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5" Dec 03 09:30:17 crc kubenswrapper[4576]: I1203 09:30:17.127149 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d8f7v\" (UniqueName: \"kubernetes.io/projected/94db1892-a7dd-4a07-b181-fa2fbcffe2fc-kube-api-access-d8f7v\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5\" (UID: \"94db1892-a7dd-4a07-b181-fa2fbcffe2fc\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5" Dec 03 09:30:17 crc kubenswrapper[4576]: I1203 09:30:17.127258 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/94db1892-a7dd-4a07-b181-fa2fbcffe2fc-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5\" (UID: \"94db1892-a7dd-4a07-b181-fa2fbcffe2fc\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5" Dec 03 09:30:17 crc kubenswrapper[4576]: I1203 09:30:17.127333 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/94db1892-a7dd-4a07-b181-fa2fbcffe2fc-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5\" (UID: \"94db1892-a7dd-4a07-b181-fa2fbcffe2fc\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5" Dec 03 09:30:17 crc kubenswrapper[4576]: I1203 09:30:17.228669 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/94db1892-a7dd-4a07-b181-fa2fbcffe2fc-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5\" (UID: \"94db1892-a7dd-4a07-b181-fa2fbcffe2fc\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5" Dec 03 09:30:17 crc kubenswrapper[4576]: I1203 09:30:17.228709 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94db1892-a7dd-4a07-b181-fa2fbcffe2fc-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5\" (UID: \"94db1892-a7dd-4a07-b181-fa2fbcffe2fc\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5" Dec 03 09:30:17 crc kubenswrapper[4576]: I1203 09:30:17.228774 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-d8f7v\" (UniqueName: \"kubernetes.io/projected/94db1892-a7dd-4a07-b181-fa2fbcffe2fc-kube-api-access-d8f7v\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5\" (UID: \"94db1892-a7dd-4a07-b181-fa2fbcffe2fc\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5" Dec 03 09:30:17 crc kubenswrapper[4576]: I1203 09:30:17.228851 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/94db1892-a7dd-4a07-b181-fa2fbcffe2fc-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5\" (UID: \"94db1892-a7dd-4a07-b181-fa2fbcffe2fc\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5" Dec 03 09:30:17 crc kubenswrapper[4576]: I1203 09:30:17.228899 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/94db1892-a7dd-4a07-b181-fa2fbcffe2fc-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5\" (UID: \"94db1892-a7dd-4a07-b181-fa2fbcffe2fc\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5" Dec 03 09:30:17 crc kubenswrapper[4576]: I1203 09:30:17.228916 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/94db1892-a7dd-4a07-b181-fa2fbcffe2fc-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5\" (UID: \"94db1892-a7dd-4a07-b181-fa2fbcffe2fc\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5" Dec 03 09:30:17 crc kubenswrapper[4576]: I1203 09:30:17.235400 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/94db1892-a7dd-4a07-b181-fa2fbcffe2fc-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5\" (UID: \"94db1892-a7dd-4a07-b181-fa2fbcffe2fc\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5" Dec 03 09:30:17 crc kubenswrapper[4576]: I1203 09:30:17.235872 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94db1892-a7dd-4a07-b181-fa2fbcffe2fc-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5\" (UID: \"94db1892-a7dd-4a07-b181-fa2fbcffe2fc\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5" Dec 03 09:30:17 crc kubenswrapper[4576]: I1203 09:30:17.243307 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/94db1892-a7dd-4a07-b181-fa2fbcffe2fc-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5\" (UID: \"94db1892-a7dd-4a07-b181-fa2fbcffe2fc\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5" Dec 03 09:30:17 crc kubenswrapper[4576]: I1203 09:30:17.243598 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/94db1892-a7dd-4a07-b181-fa2fbcffe2fc-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5\" (UID: \"94db1892-a7dd-4a07-b181-fa2fbcffe2fc\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5" Dec 03 09:30:17 crc kubenswrapper[4576]: I1203 09:30:17.247182 4576 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/94db1892-a7dd-4a07-b181-fa2fbcffe2fc-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5\" (UID: \"94db1892-a7dd-4a07-b181-fa2fbcffe2fc\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5" Dec 03 09:30:17 crc kubenswrapper[4576]: I1203 09:30:17.256889 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d8f7v\" (UniqueName: \"kubernetes.io/projected/94db1892-a7dd-4a07-b181-fa2fbcffe2fc-kube-api-access-d8f7v\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5\" (UID: \"94db1892-a7dd-4a07-b181-fa2fbcffe2fc\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5" Dec 03 09:30:17 crc kubenswrapper[4576]: I1203 09:30:17.430845 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5" Dec 03 09:30:17 crc kubenswrapper[4576]: I1203 09:30:17.950806 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5"] Dec 03 09:30:18 crc kubenswrapper[4576]: I1203 09:30:18.919634 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5" event={"ID":"94db1892-a7dd-4a07-b181-fa2fbcffe2fc","Type":"ContainerStarted","Data":"b4da5063ad84caaf5fcf5e2a8f69b5f3c3687acb030d1e8b5a616adee02be134"} Dec 03 09:30:18 crc kubenswrapper[4576]: I1203 09:30:18.920073 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5" event={"ID":"94db1892-a7dd-4a07-b181-fa2fbcffe2fc","Type":"ContainerStarted","Data":"0cc3e1855d871d19a9cc16febe3c05e81438947dba9c8e0ba1a72b21ba7e638d"} Dec 03 09:30:18 crc kubenswrapper[4576]: I1203 09:30:18.944048 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5" podStartSLOduration=1.7458906299999999 podStartE2EDuration="1.944029072s" podCreationTimestamp="2025-12-03 09:30:17 +0000 UTC" firstStartedPulling="2025-12-03 09:30:17.954207052 +0000 UTC m=+3025.340184046" lastFinishedPulling="2025-12-03 09:30:18.152345514 +0000 UTC m=+3025.538322488" observedRunningTime="2025-12-03 09:30:18.937858666 +0000 UTC m=+3026.323835670" watchObservedRunningTime="2025-12-03 09:30:18.944029072 +0000 UTC m=+3026.330006056" Dec 03 09:30:25 crc kubenswrapper[4576]: I1203 09:30:25.676998 4576 scope.go:117] "RemoveContainer" containerID="51c850a542b7285323ba43ef888f5d2233dbe9ae9ee471959ff54bc0da4a11c1" Dec 03 09:30:25 crc kubenswrapper[4576]: E1203 09:30:25.677734 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:30:33 crc kubenswrapper[4576]: I1203 09:30:33.104505 4576 scope.go:117] "RemoveContainer" containerID="12a0b594286b95b5a6d84e0fb781b5212bba03fa2940c0fbd328d136d89d2b65" Dec 03 09:30:36 crc kubenswrapper[4576]: I1203 
09:30:36.677571 4576 scope.go:117] "RemoveContainer" containerID="51c850a542b7285323ba43ef888f5d2233dbe9ae9ee471959ff54bc0da4a11c1" Dec 03 09:30:36 crc kubenswrapper[4576]: E1203 09:30:36.678282 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:30:48 crc kubenswrapper[4576]: I1203 09:30:48.677084 4576 scope.go:117] "RemoveContainer" containerID="51c850a542b7285323ba43ef888f5d2233dbe9ae9ee471959ff54bc0da4a11c1" Dec 03 09:30:48 crc kubenswrapper[4576]: E1203 09:30:48.677744 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:31:03 crc kubenswrapper[4576]: I1203 09:31:03.684452 4576 scope.go:117] "RemoveContainer" containerID="51c850a542b7285323ba43ef888f5d2233dbe9ae9ee471959ff54bc0da4a11c1" Dec 03 09:31:03 crc kubenswrapper[4576]: E1203 09:31:03.686592 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:31:09 crc kubenswrapper[4576]: I1203 09:31:09.485027 4576 generic.go:334] "Generic (PLEG): container finished" podID="94db1892-a7dd-4a07-b181-fa2fbcffe2fc" containerID="b4da5063ad84caaf5fcf5e2a8f69b5f3c3687acb030d1e8b5a616adee02be134" exitCode=0 Dec 03 09:31:09 crc kubenswrapper[4576]: I1203 09:31:09.485154 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5" event={"ID":"94db1892-a7dd-4a07-b181-fa2fbcffe2fc","Type":"ContainerDied","Data":"b4da5063ad84caaf5fcf5e2a8f69b5f3c3687acb030d1e8b5a616adee02be134"} Dec 03 09:31:10 crc kubenswrapper[4576]: I1203 09:31:10.944488 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5" Dec 03 09:31:11 crc kubenswrapper[4576]: I1203 09:31:11.096186 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/94db1892-a7dd-4a07-b181-fa2fbcffe2fc-nova-metadata-neutron-config-0\") pod \"94db1892-a7dd-4a07-b181-fa2fbcffe2fc\" (UID: \"94db1892-a7dd-4a07-b181-fa2fbcffe2fc\") " Dec 03 09:31:11 crc kubenswrapper[4576]: I1203 09:31:11.096249 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/94db1892-a7dd-4a07-b181-fa2fbcffe2fc-inventory\") pod \"94db1892-a7dd-4a07-b181-fa2fbcffe2fc\" (UID: \"94db1892-a7dd-4a07-b181-fa2fbcffe2fc\") " Dec 03 09:31:11 crc kubenswrapper[4576]: I1203 09:31:11.096300 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/94db1892-a7dd-4a07-b181-fa2fbcffe2fc-ssh-key\") pod \"94db1892-a7dd-4a07-b181-fa2fbcffe2fc\" (UID: \"94db1892-a7dd-4a07-b181-fa2fbcffe2fc\") " Dec 03 09:31:11 crc kubenswrapper[4576]: I1203 09:31:11.096416 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94db1892-a7dd-4a07-b181-fa2fbcffe2fc-neutron-metadata-combined-ca-bundle\") pod \"94db1892-a7dd-4a07-b181-fa2fbcffe2fc\" (UID: \"94db1892-a7dd-4a07-b181-fa2fbcffe2fc\") " Dec 03 09:31:11 crc kubenswrapper[4576]: I1203 09:31:11.096439 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/94db1892-a7dd-4a07-b181-fa2fbcffe2fc-neutron-ovn-metadata-agent-neutron-config-0\") pod \"94db1892-a7dd-4a07-b181-fa2fbcffe2fc\" (UID: \"94db1892-a7dd-4a07-b181-fa2fbcffe2fc\") " Dec 03 09:31:11 crc kubenswrapper[4576]: I1203 09:31:11.096480 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d8f7v\" (UniqueName: \"kubernetes.io/projected/94db1892-a7dd-4a07-b181-fa2fbcffe2fc-kube-api-access-d8f7v\") pod \"94db1892-a7dd-4a07-b181-fa2fbcffe2fc\" (UID: \"94db1892-a7dd-4a07-b181-fa2fbcffe2fc\") " Dec 03 09:31:11 crc kubenswrapper[4576]: I1203 09:31:11.104231 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94db1892-a7dd-4a07-b181-fa2fbcffe2fc-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "94db1892-a7dd-4a07-b181-fa2fbcffe2fc" (UID: "94db1892-a7dd-4a07-b181-fa2fbcffe2fc"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:31:11 crc kubenswrapper[4576]: I1203 09:31:11.106582 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94db1892-a7dd-4a07-b181-fa2fbcffe2fc-kube-api-access-d8f7v" (OuterVolumeSpecName: "kube-api-access-d8f7v") pod "94db1892-a7dd-4a07-b181-fa2fbcffe2fc" (UID: "94db1892-a7dd-4a07-b181-fa2fbcffe2fc"). InnerVolumeSpecName "kube-api-access-d8f7v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:31:11 crc kubenswrapper[4576]: I1203 09:31:11.140273 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94db1892-a7dd-4a07-b181-fa2fbcffe2fc-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "94db1892-a7dd-4a07-b181-fa2fbcffe2fc" (UID: "94db1892-a7dd-4a07-b181-fa2fbcffe2fc"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:31:11 crc kubenswrapper[4576]: I1203 09:31:11.141040 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94db1892-a7dd-4a07-b181-fa2fbcffe2fc-inventory" (OuterVolumeSpecName: "inventory") pod "94db1892-a7dd-4a07-b181-fa2fbcffe2fc" (UID: "94db1892-a7dd-4a07-b181-fa2fbcffe2fc"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:31:11 crc kubenswrapper[4576]: I1203 09:31:11.164821 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94db1892-a7dd-4a07-b181-fa2fbcffe2fc-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "94db1892-a7dd-4a07-b181-fa2fbcffe2fc" (UID: "94db1892-a7dd-4a07-b181-fa2fbcffe2fc"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:31:11 crc kubenswrapper[4576]: I1203 09:31:11.176769 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94db1892-a7dd-4a07-b181-fa2fbcffe2fc-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "94db1892-a7dd-4a07-b181-fa2fbcffe2fc" (UID: "94db1892-a7dd-4a07-b181-fa2fbcffe2fc"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:31:11 crc kubenswrapper[4576]: I1203 09:31:11.198364 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d8f7v\" (UniqueName: \"kubernetes.io/projected/94db1892-a7dd-4a07-b181-fa2fbcffe2fc-kube-api-access-d8f7v\") on node \"crc\" DevicePath \"\"" Dec 03 09:31:11 crc kubenswrapper[4576]: I1203 09:31:11.198405 4576 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/94db1892-a7dd-4a07-b181-fa2fbcffe2fc-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 03 09:31:11 crc kubenswrapper[4576]: I1203 09:31:11.198416 4576 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/94db1892-a7dd-4a07-b181-fa2fbcffe2fc-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 09:31:11 crc kubenswrapper[4576]: I1203 09:31:11.198426 4576 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/94db1892-a7dd-4a07-b181-fa2fbcffe2fc-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 09:31:11 crc kubenswrapper[4576]: I1203 09:31:11.198434 4576 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94db1892-a7dd-4a07-b181-fa2fbcffe2fc-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:31:11 crc kubenswrapper[4576]: I1203 09:31:11.198446 4576 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/94db1892-a7dd-4a07-b181-fa2fbcffe2fc-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 03 09:31:11 crc kubenswrapper[4576]: I1203 09:31:11.508860 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5" event={"ID":"94db1892-a7dd-4a07-b181-fa2fbcffe2fc","Type":"ContainerDied","Data":"0cc3e1855d871d19a9cc16febe3c05e81438947dba9c8e0ba1a72b21ba7e638d"} Dec 03 09:31:11 crc kubenswrapper[4576]: I1203 09:31:11.509276 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0cc3e1855d871d19a9cc16febe3c05e81438947dba9c8e0ba1a72b21ba7e638d" Dec 03 09:31:11 crc kubenswrapper[4576]: I1203 09:31:11.508949 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5" Dec 03 09:31:11 crc kubenswrapper[4576]: I1203 09:31:11.687993 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9"] Dec 03 09:31:11 crc kubenswrapper[4576]: E1203 09:31:11.688456 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94db1892-a7dd-4a07-b181-fa2fbcffe2fc" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 03 09:31:11 crc kubenswrapper[4576]: I1203 09:31:11.688481 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="94db1892-a7dd-4a07-b181-fa2fbcffe2fc" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 03 09:31:11 crc kubenswrapper[4576]: I1203 09:31:11.688786 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="94db1892-a7dd-4a07-b181-fa2fbcffe2fc" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 03 09:31:11 crc kubenswrapper[4576]: I1203 09:31:11.689589 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9"] Dec 03 09:31:11 crc kubenswrapper[4576]: I1203 09:31:11.689704 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9" Dec 03 09:31:11 crc kubenswrapper[4576]: I1203 09:31:11.704166 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Dec 03 09:31:11 crc kubenswrapper[4576]: I1203 09:31:11.704245 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 09:31:11 crc kubenswrapper[4576]: I1203 09:31:11.704495 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 09:31:11 crc kubenswrapper[4576]: I1203 09:31:11.704893 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 09:31:11 crc kubenswrapper[4576]: I1203 09:31:11.705217 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-sl8dp" Dec 03 09:31:11 crc kubenswrapper[4576]: I1203 09:31:11.810198 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/af426bee-00a4-4c61-be68-87719bd4f285-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9\" (UID: \"af426bee-00a4-4c61-be68-87719bd4f285\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9" Dec 03 09:31:11 crc kubenswrapper[4576]: I1203 09:31:11.811708 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af426bee-00a4-4c61-be68-87719bd4f285-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9\" (UID: \"af426bee-00a4-4c61-be68-87719bd4f285\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9" Dec 03 09:31:11 crc kubenswrapper[4576]: I1203 09:31:11.811858 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af426bee-00a4-4c61-be68-87719bd4f285-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9\" (UID: \"af426bee-00a4-4c61-be68-87719bd4f285\") " 
pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9" Dec 03 09:31:11 crc kubenswrapper[4576]: I1203 09:31:11.811922 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af426bee-00a4-4c61-be68-87719bd4f285-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9\" (UID: \"af426bee-00a4-4c61-be68-87719bd4f285\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9" Dec 03 09:31:11 crc kubenswrapper[4576]: I1203 09:31:11.811996 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dv6zh\" (UniqueName: \"kubernetes.io/projected/af426bee-00a4-4c61-be68-87719bd4f285-kube-api-access-dv6zh\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9\" (UID: \"af426bee-00a4-4c61-be68-87719bd4f285\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9" Dec 03 09:31:11 crc kubenswrapper[4576]: I1203 09:31:11.914378 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/af426bee-00a4-4c61-be68-87719bd4f285-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9\" (UID: \"af426bee-00a4-4c61-be68-87719bd4f285\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9" Dec 03 09:31:11 crc kubenswrapper[4576]: I1203 09:31:11.914454 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af426bee-00a4-4c61-be68-87719bd4f285-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9\" (UID: \"af426bee-00a4-4c61-be68-87719bd4f285\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9" Dec 03 09:31:11 crc kubenswrapper[4576]: I1203 09:31:11.914563 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af426bee-00a4-4c61-be68-87719bd4f285-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9\" (UID: \"af426bee-00a4-4c61-be68-87719bd4f285\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9" Dec 03 09:31:11 crc kubenswrapper[4576]: I1203 09:31:11.914587 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af426bee-00a4-4c61-be68-87719bd4f285-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9\" (UID: \"af426bee-00a4-4c61-be68-87719bd4f285\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9" Dec 03 09:31:11 crc kubenswrapper[4576]: I1203 09:31:11.914640 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dv6zh\" (UniqueName: \"kubernetes.io/projected/af426bee-00a4-4c61-be68-87719bd4f285-kube-api-access-dv6zh\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9\" (UID: \"af426bee-00a4-4c61-be68-87719bd4f285\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9" Dec 03 09:31:11 crc kubenswrapper[4576]: I1203 09:31:11.921616 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af426bee-00a4-4c61-be68-87719bd4f285-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9\" (UID: \"af426bee-00a4-4c61-be68-87719bd4f285\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9" Dec 03 09:31:12 crc 
kubenswrapper[4576]: I1203 09:31:11.923256 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af426bee-00a4-4c61-be68-87719bd4f285-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9\" (UID: \"af426bee-00a4-4c61-be68-87719bd4f285\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9" Dec 03 09:31:12 crc kubenswrapper[4576]: I1203 09:31:11.923269 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/af426bee-00a4-4c61-be68-87719bd4f285-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9\" (UID: \"af426bee-00a4-4c61-be68-87719bd4f285\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9" Dec 03 09:31:12 crc kubenswrapper[4576]: I1203 09:31:11.931280 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af426bee-00a4-4c61-be68-87719bd4f285-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9\" (UID: \"af426bee-00a4-4c61-be68-87719bd4f285\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9" Dec 03 09:31:12 crc kubenswrapper[4576]: I1203 09:31:12.048662 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dv6zh\" (UniqueName: \"kubernetes.io/projected/af426bee-00a4-4c61-be68-87719bd4f285-kube-api-access-dv6zh\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9\" (UID: \"af426bee-00a4-4c61-be68-87719bd4f285\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9" Dec 03 09:31:12 crc kubenswrapper[4576]: I1203 09:31:12.319253 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9" Dec 03 09:31:12 crc kubenswrapper[4576]: I1203 09:31:12.867004 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9"] Dec 03 09:31:13 crc kubenswrapper[4576]: I1203 09:31:13.531275 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9" event={"ID":"af426bee-00a4-4c61-be68-87719bd4f285","Type":"ContainerStarted","Data":"cbd9401aa626ea0ea222eb6440f6d60792cf45ce0296393ef92a94df22999838"} Dec 03 09:31:13 crc kubenswrapper[4576]: I1203 09:31:13.531574 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9" event={"ID":"af426bee-00a4-4c61-be68-87719bd4f285","Type":"ContainerStarted","Data":"6b91018a816ba30d7455f10077b2df63803869bf55be4789f7c7275f20b8e3c0"} Dec 03 09:31:13 crc kubenswrapper[4576]: I1203 09:31:13.551517 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9" podStartSLOduration=2.368996003 podStartE2EDuration="2.551500502s" podCreationTimestamp="2025-12-03 09:31:11 +0000 UTC" firstStartedPulling="2025-12-03 09:31:12.880261924 +0000 UTC m=+3080.266238898" lastFinishedPulling="2025-12-03 09:31:13.062766413 +0000 UTC m=+3080.448743397" observedRunningTime="2025-12-03 09:31:13.549670262 +0000 UTC m=+3080.935647256" watchObservedRunningTime="2025-12-03 09:31:13.551500502 +0000 UTC m=+3080.937477486" Dec 03 09:31:18 crc kubenswrapper[4576]: I1203 09:31:18.677464 4576 scope.go:117] "RemoveContainer" containerID="51c850a542b7285323ba43ef888f5d2233dbe9ae9ee471959ff54bc0da4a11c1" Dec 03 09:31:18 crc kubenswrapper[4576]: E1203 09:31:18.678375 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:31:33 crc kubenswrapper[4576]: I1203 09:31:33.682942 4576 scope.go:117] "RemoveContainer" containerID="51c850a542b7285323ba43ef888f5d2233dbe9ae9ee471959ff54bc0da4a11c1" Dec 03 09:31:33 crc kubenswrapper[4576]: E1203 09:31:33.683907 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:31:39 crc kubenswrapper[4576]: I1203 09:31:39.337029 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-8c8bm"] Dec 03 09:31:39 crc kubenswrapper[4576]: I1203 09:31:39.339831 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-8c8bm" Dec 03 09:31:39 crc kubenswrapper[4576]: I1203 09:31:39.375989 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8c8bm"] Dec 03 09:31:39 crc kubenswrapper[4576]: I1203 09:31:39.503908 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e709d229-e466-4b0e-8cc0-8fc7e3ad6910-catalog-content\") pod \"community-operators-8c8bm\" (UID: \"e709d229-e466-4b0e-8cc0-8fc7e3ad6910\") " pod="openshift-marketplace/community-operators-8c8bm" Dec 03 09:31:39 crc kubenswrapper[4576]: I1203 09:31:39.503992 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e709d229-e466-4b0e-8cc0-8fc7e3ad6910-utilities\") pod \"community-operators-8c8bm\" (UID: \"e709d229-e466-4b0e-8cc0-8fc7e3ad6910\") " pod="openshift-marketplace/community-operators-8c8bm" Dec 03 09:31:39 crc kubenswrapper[4576]: I1203 09:31:39.504064 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jp5qd\" (UniqueName: \"kubernetes.io/projected/e709d229-e466-4b0e-8cc0-8fc7e3ad6910-kube-api-access-jp5qd\") pod \"community-operators-8c8bm\" (UID: \"e709d229-e466-4b0e-8cc0-8fc7e3ad6910\") " pod="openshift-marketplace/community-operators-8c8bm" Dec 03 09:31:39 crc kubenswrapper[4576]: I1203 09:31:39.605599 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jp5qd\" (UniqueName: \"kubernetes.io/projected/e709d229-e466-4b0e-8cc0-8fc7e3ad6910-kube-api-access-jp5qd\") pod \"community-operators-8c8bm\" (UID: \"e709d229-e466-4b0e-8cc0-8fc7e3ad6910\") " pod="openshift-marketplace/community-operators-8c8bm" Dec 03 09:31:39 crc kubenswrapper[4576]: I1203 09:31:39.605726 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e709d229-e466-4b0e-8cc0-8fc7e3ad6910-catalog-content\") pod \"community-operators-8c8bm\" (UID: \"e709d229-e466-4b0e-8cc0-8fc7e3ad6910\") " pod="openshift-marketplace/community-operators-8c8bm" Dec 03 09:31:39 crc kubenswrapper[4576]: I1203 09:31:39.605773 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e709d229-e466-4b0e-8cc0-8fc7e3ad6910-utilities\") pod \"community-operators-8c8bm\" (UID: \"e709d229-e466-4b0e-8cc0-8fc7e3ad6910\") " pod="openshift-marketplace/community-operators-8c8bm" Dec 03 09:31:39 crc kubenswrapper[4576]: I1203 09:31:39.606335 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e709d229-e466-4b0e-8cc0-8fc7e3ad6910-utilities\") pod \"community-operators-8c8bm\" (UID: \"e709d229-e466-4b0e-8cc0-8fc7e3ad6910\") " pod="openshift-marketplace/community-operators-8c8bm" Dec 03 09:31:39 crc kubenswrapper[4576]: I1203 09:31:39.606408 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e709d229-e466-4b0e-8cc0-8fc7e3ad6910-catalog-content\") pod \"community-operators-8c8bm\" (UID: \"e709d229-e466-4b0e-8cc0-8fc7e3ad6910\") " pod="openshift-marketplace/community-operators-8c8bm" Dec 03 09:31:39 crc kubenswrapper[4576]: I1203 09:31:39.627960 4576 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-jp5qd\" (UniqueName: \"kubernetes.io/projected/e709d229-e466-4b0e-8cc0-8fc7e3ad6910-kube-api-access-jp5qd\") pod \"community-operators-8c8bm\" (UID: \"e709d229-e466-4b0e-8cc0-8fc7e3ad6910\") " pod="openshift-marketplace/community-operators-8c8bm" Dec 03 09:31:39 crc kubenswrapper[4576]: I1203 09:31:39.665286 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8c8bm" Dec 03 09:31:40 crc kubenswrapper[4576]: I1203 09:31:40.251368 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8c8bm"] Dec 03 09:31:40 crc kubenswrapper[4576]: I1203 09:31:40.780195 4576 generic.go:334] "Generic (PLEG): container finished" podID="e709d229-e466-4b0e-8cc0-8fc7e3ad6910" containerID="c039e3273f6648a049ddf181e1ce545bebc53a6bf1c89d5310247ee81d4d6f3a" exitCode=0 Dec 03 09:31:40 crc kubenswrapper[4576]: I1203 09:31:40.780324 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8c8bm" event={"ID":"e709d229-e466-4b0e-8cc0-8fc7e3ad6910","Type":"ContainerDied","Data":"c039e3273f6648a049ddf181e1ce545bebc53a6bf1c89d5310247ee81d4d6f3a"} Dec 03 09:31:40 crc kubenswrapper[4576]: I1203 09:31:40.780510 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8c8bm" event={"ID":"e709d229-e466-4b0e-8cc0-8fc7e3ad6910","Type":"ContainerStarted","Data":"f412356660d8159dfd7f670062e700f0556b3259ae3f9472dc717cb8da6b6a55"} Dec 03 09:31:40 crc kubenswrapper[4576]: I1203 09:31:40.782248 4576 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 09:31:41 crc kubenswrapper[4576]: I1203 09:31:41.791649 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8c8bm" event={"ID":"e709d229-e466-4b0e-8cc0-8fc7e3ad6910","Type":"ContainerStarted","Data":"88eaf7b5dd636d87f6daee1b1818c3b2f2945657b2defb898eb2a3fd7c261e5f"} Dec 03 09:31:43 crc kubenswrapper[4576]: I1203 09:31:43.819938 4576 generic.go:334] "Generic (PLEG): container finished" podID="e709d229-e466-4b0e-8cc0-8fc7e3ad6910" containerID="88eaf7b5dd636d87f6daee1b1818c3b2f2945657b2defb898eb2a3fd7c261e5f" exitCode=0 Dec 03 09:31:43 crc kubenswrapper[4576]: I1203 09:31:43.820124 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8c8bm" event={"ID":"e709d229-e466-4b0e-8cc0-8fc7e3ad6910","Type":"ContainerDied","Data":"88eaf7b5dd636d87f6daee1b1818c3b2f2945657b2defb898eb2a3fd7c261e5f"} Dec 03 09:31:44 crc kubenswrapper[4576]: I1203 09:31:44.829872 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8c8bm" event={"ID":"e709d229-e466-4b0e-8cc0-8fc7e3ad6910","Type":"ContainerStarted","Data":"89a8414cfd622def2892591b82efa45bafee0e9d4e7c520aa6c4082f1f4a6041"} Dec 03 09:31:44 crc kubenswrapper[4576]: I1203 09:31:44.855682 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-8c8bm" podStartSLOduration=2.2976397029999998 podStartE2EDuration="5.855659816s" podCreationTimestamp="2025-12-03 09:31:39 +0000 UTC" firstStartedPulling="2025-12-03 09:31:40.781905649 +0000 UTC m=+3108.167882643" lastFinishedPulling="2025-12-03 09:31:44.339925772 +0000 UTC m=+3111.725902756" observedRunningTime="2025-12-03 09:31:44.846924121 +0000 UTC m=+3112.232901105" watchObservedRunningTime="2025-12-03 
09:31:44.855659816 +0000 UTC m=+3112.241636800" Dec 03 09:31:46 crc kubenswrapper[4576]: I1203 09:31:46.677681 4576 scope.go:117] "RemoveContainer" containerID="51c850a542b7285323ba43ef888f5d2233dbe9ae9ee471959ff54bc0da4a11c1" Dec 03 09:31:46 crc kubenswrapper[4576]: E1203 09:31:46.678221 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:31:49 crc kubenswrapper[4576]: I1203 09:31:49.665858 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-8c8bm" Dec 03 09:31:49 crc kubenswrapper[4576]: I1203 09:31:49.666468 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-8c8bm" Dec 03 09:31:49 crc kubenswrapper[4576]: I1203 09:31:49.720865 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-8c8bm" Dec 03 09:31:49 crc kubenswrapper[4576]: I1203 09:31:49.922099 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-8c8bm" Dec 03 09:31:49 crc kubenswrapper[4576]: I1203 09:31:49.967611 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8c8bm"] Dec 03 09:31:51 crc kubenswrapper[4576]: I1203 09:31:51.884666 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-8c8bm" podUID="e709d229-e466-4b0e-8cc0-8fc7e3ad6910" containerName="registry-server" containerID="cri-o://89a8414cfd622def2892591b82efa45bafee0e9d4e7c520aa6c4082f1f4a6041" gracePeriod=2 Dec 03 09:31:52 crc kubenswrapper[4576]: I1203 09:31:52.384149 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-8c8bm" Dec 03 09:31:52 crc kubenswrapper[4576]: I1203 09:31:52.557743 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jp5qd\" (UniqueName: \"kubernetes.io/projected/e709d229-e466-4b0e-8cc0-8fc7e3ad6910-kube-api-access-jp5qd\") pod \"e709d229-e466-4b0e-8cc0-8fc7e3ad6910\" (UID: \"e709d229-e466-4b0e-8cc0-8fc7e3ad6910\") " Dec 03 09:31:52 crc kubenswrapper[4576]: I1203 09:31:52.557851 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e709d229-e466-4b0e-8cc0-8fc7e3ad6910-utilities\") pod \"e709d229-e466-4b0e-8cc0-8fc7e3ad6910\" (UID: \"e709d229-e466-4b0e-8cc0-8fc7e3ad6910\") " Dec 03 09:31:52 crc kubenswrapper[4576]: I1203 09:31:52.558075 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e709d229-e466-4b0e-8cc0-8fc7e3ad6910-catalog-content\") pod \"e709d229-e466-4b0e-8cc0-8fc7e3ad6910\" (UID: \"e709d229-e466-4b0e-8cc0-8fc7e3ad6910\") " Dec 03 09:31:52 crc kubenswrapper[4576]: I1203 09:31:52.559392 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e709d229-e466-4b0e-8cc0-8fc7e3ad6910-utilities" (OuterVolumeSpecName: "utilities") pod "e709d229-e466-4b0e-8cc0-8fc7e3ad6910" (UID: "e709d229-e466-4b0e-8cc0-8fc7e3ad6910"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:31:52 crc kubenswrapper[4576]: I1203 09:31:52.564547 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e709d229-e466-4b0e-8cc0-8fc7e3ad6910-kube-api-access-jp5qd" (OuterVolumeSpecName: "kube-api-access-jp5qd") pod "e709d229-e466-4b0e-8cc0-8fc7e3ad6910" (UID: "e709d229-e466-4b0e-8cc0-8fc7e3ad6910"). InnerVolumeSpecName "kube-api-access-jp5qd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:31:52 crc kubenswrapper[4576]: I1203 09:31:52.623318 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e709d229-e466-4b0e-8cc0-8fc7e3ad6910-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e709d229-e466-4b0e-8cc0-8fc7e3ad6910" (UID: "e709d229-e466-4b0e-8cc0-8fc7e3ad6910"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:31:52 crc kubenswrapper[4576]: I1203 09:31:52.659948 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jp5qd\" (UniqueName: \"kubernetes.io/projected/e709d229-e466-4b0e-8cc0-8fc7e3ad6910-kube-api-access-jp5qd\") on node \"crc\" DevicePath \"\"" Dec 03 09:31:52 crc kubenswrapper[4576]: I1203 09:31:52.659985 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e709d229-e466-4b0e-8cc0-8fc7e3ad6910-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 09:31:52 crc kubenswrapper[4576]: I1203 09:31:52.659994 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e709d229-e466-4b0e-8cc0-8fc7e3ad6910-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 09:31:52 crc kubenswrapper[4576]: I1203 09:31:52.894267 4576 generic.go:334] "Generic (PLEG): container finished" podID="e709d229-e466-4b0e-8cc0-8fc7e3ad6910" containerID="89a8414cfd622def2892591b82efa45bafee0e9d4e7c520aa6c4082f1f4a6041" exitCode=0 Dec 03 09:31:52 crc kubenswrapper[4576]: I1203 09:31:52.894851 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8c8bm" event={"ID":"e709d229-e466-4b0e-8cc0-8fc7e3ad6910","Type":"ContainerDied","Data":"89a8414cfd622def2892591b82efa45bafee0e9d4e7c520aa6c4082f1f4a6041"} Dec 03 09:31:52 crc kubenswrapper[4576]: I1203 09:31:52.894939 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8c8bm" event={"ID":"e709d229-e466-4b0e-8cc0-8fc7e3ad6910","Type":"ContainerDied","Data":"f412356660d8159dfd7f670062e700f0556b3259ae3f9472dc717cb8da6b6a55"} Dec 03 09:31:52 crc kubenswrapper[4576]: I1203 09:31:52.895013 4576 scope.go:117] "RemoveContainer" containerID="89a8414cfd622def2892591b82efa45bafee0e9d4e7c520aa6c4082f1f4a6041" Dec 03 09:31:52 crc kubenswrapper[4576]: I1203 09:31:52.895204 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-8c8bm" Dec 03 09:31:52 crc kubenswrapper[4576]: I1203 09:31:52.946541 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8c8bm"] Dec 03 09:31:52 crc kubenswrapper[4576]: I1203 09:31:52.953471 4576 scope.go:117] "RemoveContainer" containerID="88eaf7b5dd636d87f6daee1b1818c3b2f2945657b2defb898eb2a3fd7c261e5f" Dec 03 09:31:52 crc kubenswrapper[4576]: I1203 09:31:52.957221 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-8c8bm"] Dec 03 09:31:52 crc kubenswrapper[4576]: I1203 09:31:52.982576 4576 scope.go:117] "RemoveContainer" containerID="c039e3273f6648a049ddf181e1ce545bebc53a6bf1c89d5310247ee81d4d6f3a" Dec 03 09:31:53 crc kubenswrapper[4576]: I1203 09:31:53.021168 4576 scope.go:117] "RemoveContainer" containerID="89a8414cfd622def2892591b82efa45bafee0e9d4e7c520aa6c4082f1f4a6041" Dec 03 09:31:53 crc kubenswrapper[4576]: E1203 09:31:53.021504 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"89a8414cfd622def2892591b82efa45bafee0e9d4e7c520aa6c4082f1f4a6041\": container with ID starting with 89a8414cfd622def2892591b82efa45bafee0e9d4e7c520aa6c4082f1f4a6041 not found: ID does not exist" containerID="89a8414cfd622def2892591b82efa45bafee0e9d4e7c520aa6c4082f1f4a6041" Dec 03 09:31:53 crc kubenswrapper[4576]: I1203 09:31:53.021619 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89a8414cfd622def2892591b82efa45bafee0e9d4e7c520aa6c4082f1f4a6041"} err="failed to get container status \"89a8414cfd622def2892591b82efa45bafee0e9d4e7c520aa6c4082f1f4a6041\": rpc error: code = NotFound desc = could not find container \"89a8414cfd622def2892591b82efa45bafee0e9d4e7c520aa6c4082f1f4a6041\": container with ID starting with 89a8414cfd622def2892591b82efa45bafee0e9d4e7c520aa6c4082f1f4a6041 not found: ID does not exist" Dec 03 09:31:53 crc kubenswrapper[4576]: I1203 09:31:53.021643 4576 scope.go:117] "RemoveContainer" containerID="88eaf7b5dd636d87f6daee1b1818c3b2f2945657b2defb898eb2a3fd7c261e5f" Dec 03 09:31:53 crc kubenswrapper[4576]: E1203 09:31:53.021828 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"88eaf7b5dd636d87f6daee1b1818c3b2f2945657b2defb898eb2a3fd7c261e5f\": container with ID starting with 88eaf7b5dd636d87f6daee1b1818c3b2f2945657b2defb898eb2a3fd7c261e5f not found: ID does not exist" containerID="88eaf7b5dd636d87f6daee1b1818c3b2f2945657b2defb898eb2a3fd7c261e5f" Dec 03 09:31:53 crc kubenswrapper[4576]: I1203 09:31:53.021852 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88eaf7b5dd636d87f6daee1b1818c3b2f2945657b2defb898eb2a3fd7c261e5f"} err="failed to get container status \"88eaf7b5dd636d87f6daee1b1818c3b2f2945657b2defb898eb2a3fd7c261e5f\": rpc error: code = NotFound desc = could not find container \"88eaf7b5dd636d87f6daee1b1818c3b2f2945657b2defb898eb2a3fd7c261e5f\": container with ID starting with 88eaf7b5dd636d87f6daee1b1818c3b2f2945657b2defb898eb2a3fd7c261e5f not found: ID does not exist" Dec 03 09:31:53 crc kubenswrapper[4576]: I1203 09:31:53.021868 4576 scope.go:117] "RemoveContainer" containerID="c039e3273f6648a049ddf181e1ce545bebc53a6bf1c89d5310247ee81d4d6f3a" Dec 03 09:31:53 crc kubenswrapper[4576]: E1203 09:31:53.022053 4576 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"c039e3273f6648a049ddf181e1ce545bebc53a6bf1c89d5310247ee81d4d6f3a\": container with ID starting with c039e3273f6648a049ddf181e1ce545bebc53a6bf1c89d5310247ee81d4d6f3a not found: ID does not exist" containerID="c039e3273f6648a049ddf181e1ce545bebc53a6bf1c89d5310247ee81d4d6f3a" Dec 03 09:31:53 crc kubenswrapper[4576]: I1203 09:31:53.022075 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c039e3273f6648a049ddf181e1ce545bebc53a6bf1c89d5310247ee81d4d6f3a"} err="failed to get container status \"c039e3273f6648a049ddf181e1ce545bebc53a6bf1c89d5310247ee81d4d6f3a\": rpc error: code = NotFound desc = could not find container \"c039e3273f6648a049ddf181e1ce545bebc53a6bf1c89d5310247ee81d4d6f3a\": container with ID starting with c039e3273f6648a049ddf181e1ce545bebc53a6bf1c89d5310247ee81d4d6f3a not found: ID does not exist" Dec 03 09:31:53 crc kubenswrapper[4576]: I1203 09:31:53.692768 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e709d229-e466-4b0e-8cc0-8fc7e3ad6910" path="/var/lib/kubelet/pods/e709d229-e466-4b0e-8cc0-8fc7e3ad6910/volumes" Dec 03 09:32:01 crc kubenswrapper[4576]: I1203 09:32:01.677587 4576 scope.go:117] "RemoveContainer" containerID="51c850a542b7285323ba43ef888f5d2233dbe9ae9ee471959ff54bc0da4a11c1" Dec 03 09:32:01 crc kubenswrapper[4576]: E1203 09:32:01.678358 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:32:09 crc kubenswrapper[4576]: I1203 09:32:09.883760 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-6jkgt"] Dec 03 09:32:09 crc kubenswrapper[4576]: E1203 09:32:09.884491 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e709d229-e466-4b0e-8cc0-8fc7e3ad6910" containerName="extract-content" Dec 03 09:32:09 crc kubenswrapper[4576]: I1203 09:32:09.884503 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="e709d229-e466-4b0e-8cc0-8fc7e3ad6910" containerName="extract-content" Dec 03 09:32:09 crc kubenswrapper[4576]: E1203 09:32:09.884559 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e709d229-e466-4b0e-8cc0-8fc7e3ad6910" containerName="registry-server" Dec 03 09:32:09 crc kubenswrapper[4576]: I1203 09:32:09.884566 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="e709d229-e466-4b0e-8cc0-8fc7e3ad6910" containerName="registry-server" Dec 03 09:32:09 crc kubenswrapper[4576]: E1203 09:32:09.884582 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e709d229-e466-4b0e-8cc0-8fc7e3ad6910" containerName="extract-utilities" Dec 03 09:32:09 crc kubenswrapper[4576]: I1203 09:32:09.884588 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="e709d229-e466-4b0e-8cc0-8fc7e3ad6910" containerName="extract-utilities" Dec 03 09:32:09 crc kubenswrapper[4576]: I1203 09:32:09.884777 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="e709d229-e466-4b0e-8cc0-8fc7e3ad6910" containerName="registry-server" Dec 03 09:32:09 crc kubenswrapper[4576]: I1203 09:32:09.886095 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6jkgt" Dec 03 09:32:09 crc kubenswrapper[4576]: I1203 09:32:09.894440 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6jkgt"] Dec 03 09:32:09 crc kubenswrapper[4576]: I1203 09:32:09.995838 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x5vzd\" (UniqueName: \"kubernetes.io/projected/a8867133-290c-475c-ac49-05cce90e3001-kube-api-access-x5vzd\") pod \"redhat-operators-6jkgt\" (UID: \"a8867133-290c-475c-ac49-05cce90e3001\") " pod="openshift-marketplace/redhat-operators-6jkgt" Dec 03 09:32:09 crc kubenswrapper[4576]: I1203 09:32:09.995995 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8867133-290c-475c-ac49-05cce90e3001-utilities\") pod \"redhat-operators-6jkgt\" (UID: \"a8867133-290c-475c-ac49-05cce90e3001\") " pod="openshift-marketplace/redhat-operators-6jkgt" Dec 03 09:32:09 crc kubenswrapper[4576]: I1203 09:32:09.996026 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8867133-290c-475c-ac49-05cce90e3001-catalog-content\") pod \"redhat-operators-6jkgt\" (UID: \"a8867133-290c-475c-ac49-05cce90e3001\") " pod="openshift-marketplace/redhat-operators-6jkgt" Dec 03 09:32:10 crc kubenswrapper[4576]: I1203 09:32:10.098754 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x5vzd\" (UniqueName: \"kubernetes.io/projected/a8867133-290c-475c-ac49-05cce90e3001-kube-api-access-x5vzd\") pod \"redhat-operators-6jkgt\" (UID: \"a8867133-290c-475c-ac49-05cce90e3001\") " pod="openshift-marketplace/redhat-operators-6jkgt" Dec 03 09:32:10 crc kubenswrapper[4576]: I1203 09:32:10.098887 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8867133-290c-475c-ac49-05cce90e3001-utilities\") pod \"redhat-operators-6jkgt\" (UID: \"a8867133-290c-475c-ac49-05cce90e3001\") " pod="openshift-marketplace/redhat-operators-6jkgt" Dec 03 09:32:10 crc kubenswrapper[4576]: I1203 09:32:10.098917 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8867133-290c-475c-ac49-05cce90e3001-catalog-content\") pod \"redhat-operators-6jkgt\" (UID: \"a8867133-290c-475c-ac49-05cce90e3001\") " pod="openshift-marketplace/redhat-operators-6jkgt" Dec 03 09:32:10 crc kubenswrapper[4576]: I1203 09:32:10.099560 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8867133-290c-475c-ac49-05cce90e3001-utilities\") pod \"redhat-operators-6jkgt\" (UID: \"a8867133-290c-475c-ac49-05cce90e3001\") " pod="openshift-marketplace/redhat-operators-6jkgt" Dec 03 09:32:10 crc kubenswrapper[4576]: I1203 09:32:10.099565 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8867133-290c-475c-ac49-05cce90e3001-catalog-content\") pod \"redhat-operators-6jkgt\" (UID: \"a8867133-290c-475c-ac49-05cce90e3001\") " pod="openshift-marketplace/redhat-operators-6jkgt" Dec 03 09:32:10 crc kubenswrapper[4576]: I1203 09:32:10.121486 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-x5vzd\" (UniqueName: \"kubernetes.io/projected/a8867133-290c-475c-ac49-05cce90e3001-kube-api-access-x5vzd\") pod \"redhat-operators-6jkgt\" (UID: \"a8867133-290c-475c-ac49-05cce90e3001\") " pod="openshift-marketplace/redhat-operators-6jkgt" Dec 03 09:32:10 crc kubenswrapper[4576]: I1203 09:32:10.244197 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6jkgt" Dec 03 09:32:10 crc kubenswrapper[4576]: I1203 09:32:10.740186 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6jkgt"] Dec 03 09:32:11 crc kubenswrapper[4576]: I1203 09:32:11.081942 4576 generic.go:334] "Generic (PLEG): container finished" podID="a8867133-290c-475c-ac49-05cce90e3001" containerID="c312cd49dbc7adc23bc9d5f689fe663085a0f940cd8ecd4559682657a4ca8fa9" exitCode=0 Dec 03 09:32:11 crc kubenswrapper[4576]: I1203 09:32:11.081994 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6jkgt" event={"ID":"a8867133-290c-475c-ac49-05cce90e3001","Type":"ContainerDied","Data":"c312cd49dbc7adc23bc9d5f689fe663085a0f940cd8ecd4559682657a4ca8fa9"} Dec 03 09:32:11 crc kubenswrapper[4576]: I1203 09:32:11.082024 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6jkgt" event={"ID":"a8867133-290c-475c-ac49-05cce90e3001","Type":"ContainerStarted","Data":"052a1b6d1d33eade333d59b65b2441c579661143615be9ec202e25f5abb8c6c1"} Dec 03 09:32:12 crc kubenswrapper[4576]: I1203 09:32:12.096272 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6jkgt" event={"ID":"a8867133-290c-475c-ac49-05cce90e3001","Type":"ContainerStarted","Data":"b3d7631b4f5122d23e4e124f3bec9a559f6d517e0c8fcc8568b31667e8d00dbb"} Dec 03 09:32:15 crc kubenswrapper[4576]: I1203 09:32:15.678056 4576 scope.go:117] "RemoveContainer" containerID="51c850a542b7285323ba43ef888f5d2233dbe9ae9ee471959ff54bc0da4a11c1" Dec 03 09:32:15 crc kubenswrapper[4576]: E1203 09:32:15.678972 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:32:16 crc kubenswrapper[4576]: I1203 09:32:16.145683 4576 generic.go:334] "Generic (PLEG): container finished" podID="a8867133-290c-475c-ac49-05cce90e3001" containerID="b3d7631b4f5122d23e4e124f3bec9a559f6d517e0c8fcc8568b31667e8d00dbb" exitCode=0 Dec 03 09:32:16 crc kubenswrapper[4576]: I1203 09:32:16.145832 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6jkgt" event={"ID":"a8867133-290c-475c-ac49-05cce90e3001","Type":"ContainerDied","Data":"b3d7631b4f5122d23e4e124f3bec9a559f6d517e0c8fcc8568b31667e8d00dbb"} Dec 03 09:32:17 crc kubenswrapper[4576]: I1203 09:32:17.159676 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6jkgt" event={"ID":"a8867133-290c-475c-ac49-05cce90e3001","Type":"ContainerStarted","Data":"785784d566b302a90e8702ae5c7ff4a12720dfa4e61f56f3ca0e22d687ed7a38"} Dec 03 09:32:17 crc kubenswrapper[4576]: I1203 09:32:17.185945 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/redhat-operators-6jkgt" podStartSLOduration=2.5735488269999998 podStartE2EDuration="8.185923357s" podCreationTimestamp="2025-12-03 09:32:09 +0000 UTC" firstStartedPulling="2025-12-03 09:32:11.086843182 +0000 UTC m=+3138.472820156" lastFinishedPulling="2025-12-03 09:32:16.699217702 +0000 UTC m=+3144.085194686" observedRunningTime="2025-12-03 09:32:17.177663714 +0000 UTC m=+3144.563640718" watchObservedRunningTime="2025-12-03 09:32:17.185923357 +0000 UTC m=+3144.571900341" Dec 03 09:32:20 crc kubenswrapper[4576]: I1203 09:32:20.245112 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-6jkgt" Dec 03 09:32:20 crc kubenswrapper[4576]: I1203 09:32:20.245704 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-6jkgt" Dec 03 09:32:21 crc kubenswrapper[4576]: I1203 09:32:21.322810 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-6jkgt" podUID="a8867133-290c-475c-ac49-05cce90e3001" containerName="registry-server" probeResult="failure" output=< Dec 03 09:32:21 crc kubenswrapper[4576]: timeout: failed to connect service ":50051" within 1s Dec 03 09:32:21 crc kubenswrapper[4576]: > Dec 03 09:32:27 crc kubenswrapper[4576]: I1203 09:32:27.677815 4576 scope.go:117] "RemoveContainer" containerID="51c850a542b7285323ba43ef888f5d2233dbe9ae9ee471959ff54bc0da4a11c1" Dec 03 09:32:27 crc kubenswrapper[4576]: E1203 09:32:27.678768 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:32:30 crc kubenswrapper[4576]: I1203 09:32:30.307898 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-6jkgt" Dec 03 09:32:30 crc kubenswrapper[4576]: I1203 09:32:30.365265 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-6jkgt" Dec 03 09:32:30 crc kubenswrapper[4576]: I1203 09:32:30.548629 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6jkgt"] Dec 03 09:32:32 crc kubenswrapper[4576]: I1203 09:32:32.412973 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-6jkgt" podUID="a8867133-290c-475c-ac49-05cce90e3001" containerName="registry-server" containerID="cri-o://785784d566b302a90e8702ae5c7ff4a12720dfa4e61f56f3ca0e22d687ed7a38" gracePeriod=2 Dec 03 09:32:32 crc kubenswrapper[4576]: I1203 09:32:32.874769 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6jkgt" Dec 03 09:32:32 crc kubenswrapper[4576]: I1203 09:32:32.924049 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8867133-290c-475c-ac49-05cce90e3001-utilities\") pod \"a8867133-290c-475c-ac49-05cce90e3001\" (UID: \"a8867133-290c-475c-ac49-05cce90e3001\") " Dec 03 09:32:32 crc kubenswrapper[4576]: I1203 09:32:32.924345 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x5vzd\" (UniqueName: \"kubernetes.io/projected/a8867133-290c-475c-ac49-05cce90e3001-kube-api-access-x5vzd\") pod \"a8867133-290c-475c-ac49-05cce90e3001\" (UID: \"a8867133-290c-475c-ac49-05cce90e3001\") " Dec 03 09:32:32 crc kubenswrapper[4576]: I1203 09:32:32.924413 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8867133-290c-475c-ac49-05cce90e3001-catalog-content\") pod \"a8867133-290c-475c-ac49-05cce90e3001\" (UID: \"a8867133-290c-475c-ac49-05cce90e3001\") " Dec 03 09:32:32 crc kubenswrapper[4576]: I1203 09:32:32.927368 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8867133-290c-475c-ac49-05cce90e3001-utilities" (OuterVolumeSpecName: "utilities") pod "a8867133-290c-475c-ac49-05cce90e3001" (UID: "a8867133-290c-475c-ac49-05cce90e3001"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:32:32 crc kubenswrapper[4576]: I1203 09:32:32.935436 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8867133-290c-475c-ac49-05cce90e3001-kube-api-access-x5vzd" (OuterVolumeSpecName: "kube-api-access-x5vzd") pod "a8867133-290c-475c-ac49-05cce90e3001" (UID: "a8867133-290c-475c-ac49-05cce90e3001"). InnerVolumeSpecName "kube-api-access-x5vzd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:32:33 crc kubenswrapper[4576]: I1203 09:32:33.027878 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x5vzd\" (UniqueName: \"kubernetes.io/projected/a8867133-290c-475c-ac49-05cce90e3001-kube-api-access-x5vzd\") on node \"crc\" DevicePath \"\"" Dec 03 09:32:33 crc kubenswrapper[4576]: I1203 09:32:33.027910 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8867133-290c-475c-ac49-05cce90e3001-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 09:32:33 crc kubenswrapper[4576]: I1203 09:32:33.041729 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8867133-290c-475c-ac49-05cce90e3001-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a8867133-290c-475c-ac49-05cce90e3001" (UID: "a8867133-290c-475c-ac49-05cce90e3001"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:32:33 crc kubenswrapper[4576]: I1203 09:32:33.129872 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8867133-290c-475c-ac49-05cce90e3001-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 09:32:33 crc kubenswrapper[4576]: I1203 09:32:33.426630 4576 generic.go:334] "Generic (PLEG): container finished" podID="a8867133-290c-475c-ac49-05cce90e3001" containerID="785784d566b302a90e8702ae5c7ff4a12720dfa4e61f56f3ca0e22d687ed7a38" exitCode=0 Dec 03 09:32:33 crc kubenswrapper[4576]: I1203 09:32:33.426694 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6jkgt" event={"ID":"a8867133-290c-475c-ac49-05cce90e3001","Type":"ContainerDied","Data":"785784d566b302a90e8702ae5c7ff4a12720dfa4e61f56f3ca0e22d687ed7a38"} Dec 03 09:32:33 crc kubenswrapper[4576]: I1203 09:32:33.426723 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6jkgt" event={"ID":"a8867133-290c-475c-ac49-05cce90e3001","Type":"ContainerDied","Data":"052a1b6d1d33eade333d59b65b2441c579661143615be9ec202e25f5abb8c6c1"} Dec 03 09:32:33 crc kubenswrapper[4576]: I1203 09:32:33.426748 4576 scope.go:117] "RemoveContainer" containerID="785784d566b302a90e8702ae5c7ff4a12720dfa4e61f56f3ca0e22d687ed7a38" Dec 03 09:32:33 crc kubenswrapper[4576]: I1203 09:32:33.426745 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6jkgt" Dec 03 09:32:33 crc kubenswrapper[4576]: I1203 09:32:33.454010 4576 scope.go:117] "RemoveContainer" containerID="b3d7631b4f5122d23e4e124f3bec9a559f6d517e0c8fcc8568b31667e8d00dbb" Dec 03 09:32:33 crc kubenswrapper[4576]: I1203 09:32:33.478407 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6jkgt"] Dec 03 09:32:33 crc kubenswrapper[4576]: I1203 09:32:33.485972 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-6jkgt"] Dec 03 09:32:33 crc kubenswrapper[4576]: I1203 09:32:33.501448 4576 scope.go:117] "RemoveContainer" containerID="c312cd49dbc7adc23bc9d5f689fe663085a0f940cd8ecd4559682657a4ca8fa9" Dec 03 09:32:33 crc kubenswrapper[4576]: I1203 09:32:33.538542 4576 scope.go:117] "RemoveContainer" containerID="785784d566b302a90e8702ae5c7ff4a12720dfa4e61f56f3ca0e22d687ed7a38" Dec 03 09:32:33 crc kubenswrapper[4576]: E1203 09:32:33.539127 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"785784d566b302a90e8702ae5c7ff4a12720dfa4e61f56f3ca0e22d687ed7a38\": container with ID starting with 785784d566b302a90e8702ae5c7ff4a12720dfa4e61f56f3ca0e22d687ed7a38 not found: ID does not exist" containerID="785784d566b302a90e8702ae5c7ff4a12720dfa4e61f56f3ca0e22d687ed7a38" Dec 03 09:32:33 crc kubenswrapper[4576]: I1203 09:32:33.539224 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"785784d566b302a90e8702ae5c7ff4a12720dfa4e61f56f3ca0e22d687ed7a38"} err="failed to get container status \"785784d566b302a90e8702ae5c7ff4a12720dfa4e61f56f3ca0e22d687ed7a38\": rpc error: code = NotFound desc = could not find container \"785784d566b302a90e8702ae5c7ff4a12720dfa4e61f56f3ca0e22d687ed7a38\": container with ID starting with 785784d566b302a90e8702ae5c7ff4a12720dfa4e61f56f3ca0e22d687ed7a38 not found: ID does not exist" Dec 03 09:32:33 crc 
kubenswrapper[4576]: I1203 09:32:33.539296 4576 scope.go:117] "RemoveContainer" containerID="b3d7631b4f5122d23e4e124f3bec9a559f6d517e0c8fcc8568b31667e8d00dbb" Dec 03 09:32:33 crc kubenswrapper[4576]: E1203 09:32:33.539784 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3d7631b4f5122d23e4e124f3bec9a559f6d517e0c8fcc8568b31667e8d00dbb\": container with ID starting with b3d7631b4f5122d23e4e124f3bec9a559f6d517e0c8fcc8568b31667e8d00dbb not found: ID does not exist" containerID="b3d7631b4f5122d23e4e124f3bec9a559f6d517e0c8fcc8568b31667e8d00dbb" Dec 03 09:32:33 crc kubenswrapper[4576]: I1203 09:32:33.539834 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3d7631b4f5122d23e4e124f3bec9a559f6d517e0c8fcc8568b31667e8d00dbb"} err="failed to get container status \"b3d7631b4f5122d23e4e124f3bec9a559f6d517e0c8fcc8568b31667e8d00dbb\": rpc error: code = NotFound desc = could not find container \"b3d7631b4f5122d23e4e124f3bec9a559f6d517e0c8fcc8568b31667e8d00dbb\": container with ID starting with b3d7631b4f5122d23e4e124f3bec9a559f6d517e0c8fcc8568b31667e8d00dbb not found: ID does not exist" Dec 03 09:32:33 crc kubenswrapper[4576]: I1203 09:32:33.539867 4576 scope.go:117] "RemoveContainer" containerID="c312cd49dbc7adc23bc9d5f689fe663085a0f940cd8ecd4559682657a4ca8fa9" Dec 03 09:32:33 crc kubenswrapper[4576]: E1203 09:32:33.540195 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c312cd49dbc7adc23bc9d5f689fe663085a0f940cd8ecd4559682657a4ca8fa9\": container with ID starting with c312cd49dbc7adc23bc9d5f689fe663085a0f940cd8ecd4559682657a4ca8fa9 not found: ID does not exist" containerID="c312cd49dbc7adc23bc9d5f689fe663085a0f940cd8ecd4559682657a4ca8fa9" Dec 03 09:32:33 crc kubenswrapper[4576]: I1203 09:32:33.540249 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c312cd49dbc7adc23bc9d5f689fe663085a0f940cd8ecd4559682657a4ca8fa9"} err="failed to get container status \"c312cd49dbc7adc23bc9d5f689fe663085a0f940cd8ecd4559682657a4ca8fa9\": rpc error: code = NotFound desc = could not find container \"c312cd49dbc7adc23bc9d5f689fe663085a0f940cd8ecd4559682657a4ca8fa9\": container with ID starting with c312cd49dbc7adc23bc9d5f689fe663085a0f940cd8ecd4559682657a4ca8fa9 not found: ID does not exist" Dec 03 09:32:33 crc kubenswrapper[4576]: I1203 09:32:33.688676 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8867133-290c-475c-ac49-05cce90e3001" path="/var/lib/kubelet/pods/a8867133-290c-475c-ac49-05cce90e3001/volumes" Dec 03 09:32:39 crc kubenswrapper[4576]: I1203 09:32:39.678844 4576 scope.go:117] "RemoveContainer" containerID="51c850a542b7285323ba43ef888f5d2233dbe9ae9ee471959ff54bc0da4a11c1" Dec 03 09:32:39 crc kubenswrapper[4576]: E1203 09:32:39.679855 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:32:54 crc kubenswrapper[4576]: I1203 09:32:54.677137 4576 scope.go:117] "RemoveContainer" containerID="51c850a542b7285323ba43ef888f5d2233dbe9ae9ee471959ff54bc0da4a11c1" 
Dec 03 09:32:54 crc kubenswrapper[4576]: E1203 09:32:54.677918 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:33:06 crc kubenswrapper[4576]: I1203 09:33:06.677089 4576 scope.go:117] "RemoveContainer" containerID="51c850a542b7285323ba43ef888f5d2233dbe9ae9ee471959ff54bc0da4a11c1" Dec 03 09:33:06 crc kubenswrapper[4576]: E1203 09:33:06.677923 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:33:17 crc kubenswrapper[4576]: I1203 09:33:17.720338 4576 scope.go:117] "RemoveContainer" containerID="51c850a542b7285323ba43ef888f5d2233dbe9ae9ee471959ff54bc0da4a11c1" Dec 03 09:33:17 crc kubenswrapper[4576]: E1203 09:33:17.721762 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:33:29 crc kubenswrapper[4576]: I1203 09:33:29.678063 4576 scope.go:117] "RemoveContainer" containerID="51c850a542b7285323ba43ef888f5d2233dbe9ae9ee471959ff54bc0da4a11c1" Dec 03 09:33:29 crc kubenswrapper[4576]: E1203 09:33:29.679017 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:33:40 crc kubenswrapper[4576]: I1203 09:33:40.677892 4576 scope.go:117] "RemoveContainer" containerID="51c850a542b7285323ba43ef888f5d2233dbe9ae9ee471959ff54bc0da4a11c1" Dec 03 09:33:40 crc kubenswrapper[4576]: E1203 09:33:40.679652 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:33:51 crc kubenswrapper[4576]: I1203 09:33:51.677790 4576 scope.go:117] "RemoveContainer" containerID="51c850a542b7285323ba43ef888f5d2233dbe9ae9ee471959ff54bc0da4a11c1" Dec 03 09:33:51 crc kubenswrapper[4576]: E1203 09:33:51.678681 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:34:04 crc kubenswrapper[4576]: I1203 09:34:04.677000 4576 scope.go:117] "RemoveContainer" containerID="51c850a542b7285323ba43ef888f5d2233dbe9ae9ee471959ff54bc0da4a11c1" Dec 03 09:34:04 crc kubenswrapper[4576]: E1203 09:34:04.677678 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:34:19 crc kubenswrapper[4576]: I1203 09:34:19.678037 4576 scope.go:117] "RemoveContainer" containerID="51c850a542b7285323ba43ef888f5d2233dbe9ae9ee471959ff54bc0da4a11c1" Dec 03 09:34:19 crc kubenswrapper[4576]: E1203 09:34:19.689718 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:34:34 crc kubenswrapper[4576]: I1203 09:34:34.677932 4576 scope.go:117] "RemoveContainer" containerID="51c850a542b7285323ba43ef888f5d2233dbe9ae9ee471959ff54bc0da4a11c1" Dec 03 09:34:34 crc kubenswrapper[4576]: E1203 09:34:34.678805 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:34:48 crc kubenswrapper[4576]: I1203 09:34:48.677486 4576 scope.go:117] "RemoveContainer" containerID="51c850a542b7285323ba43ef888f5d2233dbe9ae9ee471959ff54bc0da4a11c1" Dec 03 09:34:48 crc kubenswrapper[4576]: E1203 09:34:48.678745 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:35:00 crc kubenswrapper[4576]: I1203 09:35:00.677198 4576 scope.go:117] "RemoveContainer" containerID="51c850a542b7285323ba43ef888f5d2233dbe9ae9ee471959ff54bc0da4a11c1" Dec 03 09:35:00 crc kubenswrapper[4576]: E1203 09:35:00.678256 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:35:12 crc kubenswrapper[4576]: I1203 09:35:12.678267 4576 scope.go:117] "RemoveContainer" containerID="51c850a542b7285323ba43ef888f5d2233dbe9ae9ee471959ff54bc0da4a11c1" Dec 03 09:35:13 crc kubenswrapper[4576]: I1203 09:35:13.387766 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" event={"ID":"60b1bede-26e9-4b5d-b450-9866da685693","Type":"ContainerStarted","Data":"707ccaf0ef5f353850073889c61e083c71eff648ae35958f1c9dff2475427c85"} Dec 03 09:35:40 crc kubenswrapper[4576]: I1203 09:35:40.723751 4576 generic.go:334] "Generic (PLEG): container finished" podID="af426bee-00a4-4c61-be68-87719bd4f285" containerID="cbd9401aa626ea0ea222eb6440f6d60792cf45ce0296393ef92a94df22999838" exitCode=0 Dec 03 09:35:40 crc kubenswrapper[4576]: I1203 09:35:40.723836 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9" event={"ID":"af426bee-00a4-4c61-be68-87719bd4f285","Type":"ContainerDied","Data":"cbd9401aa626ea0ea222eb6440f6d60792cf45ce0296393ef92a94df22999838"} Dec 03 09:35:42 crc kubenswrapper[4576]: I1203 09:35:42.213806 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9" Dec 03 09:35:42 crc kubenswrapper[4576]: I1203 09:35:42.331597 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af426bee-00a4-4c61-be68-87719bd4f285-ssh-key\") pod \"af426bee-00a4-4c61-be68-87719bd4f285\" (UID: \"af426bee-00a4-4c61-be68-87719bd4f285\") " Dec 03 09:35:42 crc kubenswrapper[4576]: I1203 09:35:42.331855 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af426bee-00a4-4c61-be68-87719bd4f285-inventory\") pod \"af426bee-00a4-4c61-be68-87719bd4f285\" (UID: \"af426bee-00a4-4c61-be68-87719bd4f285\") " Dec 03 09:35:42 crc kubenswrapper[4576]: I1203 09:35:42.331883 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/af426bee-00a4-4c61-be68-87719bd4f285-libvirt-secret-0\") pod \"af426bee-00a4-4c61-be68-87719bd4f285\" (UID: \"af426bee-00a4-4c61-be68-87719bd4f285\") " Dec 03 09:35:42 crc kubenswrapper[4576]: I1203 09:35:42.331902 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af426bee-00a4-4c61-be68-87719bd4f285-libvirt-combined-ca-bundle\") pod \"af426bee-00a4-4c61-be68-87719bd4f285\" (UID: \"af426bee-00a4-4c61-be68-87719bd4f285\") " Dec 03 09:35:42 crc kubenswrapper[4576]: I1203 09:35:42.332096 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dv6zh\" (UniqueName: \"kubernetes.io/projected/af426bee-00a4-4c61-be68-87719bd4f285-kube-api-access-dv6zh\") pod \"af426bee-00a4-4c61-be68-87719bd4f285\" (UID: \"af426bee-00a4-4c61-be68-87719bd4f285\") " Dec 03 09:35:42 crc kubenswrapper[4576]: I1203 09:35:42.346152 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af426bee-00a4-4c61-be68-87719bd4f285-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "af426bee-00a4-4c61-be68-87719bd4f285" (UID: 
"af426bee-00a4-4c61-be68-87719bd4f285"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:35:42 crc kubenswrapper[4576]: I1203 09:35:42.346171 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af426bee-00a4-4c61-be68-87719bd4f285-kube-api-access-dv6zh" (OuterVolumeSpecName: "kube-api-access-dv6zh") pod "af426bee-00a4-4c61-be68-87719bd4f285" (UID: "af426bee-00a4-4c61-be68-87719bd4f285"). InnerVolumeSpecName "kube-api-access-dv6zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:35:42 crc kubenswrapper[4576]: I1203 09:35:42.363921 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af426bee-00a4-4c61-be68-87719bd4f285-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "af426bee-00a4-4c61-be68-87719bd4f285" (UID: "af426bee-00a4-4c61-be68-87719bd4f285"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:35:42 crc kubenswrapper[4576]: I1203 09:35:42.369376 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af426bee-00a4-4c61-be68-87719bd4f285-inventory" (OuterVolumeSpecName: "inventory") pod "af426bee-00a4-4c61-be68-87719bd4f285" (UID: "af426bee-00a4-4c61-be68-87719bd4f285"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:35:42 crc kubenswrapper[4576]: I1203 09:35:42.370963 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af426bee-00a4-4c61-be68-87719bd4f285-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "af426bee-00a4-4c61-be68-87719bd4f285" (UID: "af426bee-00a4-4c61-be68-87719bd4f285"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:35:42 crc kubenswrapper[4576]: I1203 09:35:42.434105 4576 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af426bee-00a4-4c61-be68-87719bd4f285-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 09:35:42 crc kubenswrapper[4576]: I1203 09:35:42.434297 4576 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af426bee-00a4-4c61-be68-87719bd4f285-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 09:35:42 crc kubenswrapper[4576]: I1203 09:35:42.434389 4576 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/af426bee-00a4-4c61-be68-87719bd4f285-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Dec 03 09:35:42 crc kubenswrapper[4576]: I1203 09:35:42.434449 4576 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af426bee-00a4-4c61-be68-87719bd4f285-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:35:42 crc kubenswrapper[4576]: I1203 09:35:42.434511 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dv6zh\" (UniqueName: \"kubernetes.io/projected/af426bee-00a4-4c61-be68-87719bd4f285-kube-api-access-dv6zh\") on node \"crc\" DevicePath \"\"" Dec 03 09:35:42 crc kubenswrapper[4576]: I1203 09:35:42.747777 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9" event={"ID":"af426bee-00a4-4c61-be68-87719bd4f285","Type":"ContainerDied","Data":"6b91018a816ba30d7455f10077b2df63803869bf55be4789f7c7275f20b8e3c0"} Dec 03 09:35:42 crc kubenswrapper[4576]: I1203 09:35:42.747841 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6b91018a816ba30d7455f10077b2df63803869bf55be4789f7c7275f20b8e3c0" Dec 03 09:35:42 crc kubenswrapper[4576]: I1203 09:35:42.748249 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9" Dec 03 09:35:42 crc kubenswrapper[4576]: I1203 09:35:42.860809 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-649lx"] Dec 03 09:35:42 crc kubenswrapper[4576]: E1203 09:35:42.861326 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8867133-290c-475c-ac49-05cce90e3001" containerName="registry-server" Dec 03 09:35:42 crc kubenswrapper[4576]: I1203 09:35:42.861351 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8867133-290c-475c-ac49-05cce90e3001" containerName="registry-server" Dec 03 09:35:42 crc kubenswrapper[4576]: E1203 09:35:42.861378 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8867133-290c-475c-ac49-05cce90e3001" containerName="extract-content" Dec 03 09:35:42 crc kubenswrapper[4576]: I1203 09:35:42.861387 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8867133-290c-475c-ac49-05cce90e3001" containerName="extract-content" Dec 03 09:35:42 crc kubenswrapper[4576]: E1203 09:35:42.861417 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af426bee-00a4-4c61-be68-87719bd4f285" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 03 09:35:42 crc kubenswrapper[4576]: I1203 09:35:42.861428 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="af426bee-00a4-4c61-be68-87719bd4f285" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 03 09:35:42 crc kubenswrapper[4576]: E1203 09:35:42.861447 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8867133-290c-475c-ac49-05cce90e3001" containerName="extract-utilities" Dec 03 09:35:42 crc kubenswrapper[4576]: I1203 09:35:42.861455 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8867133-290c-475c-ac49-05cce90e3001" containerName="extract-utilities" Dec 03 09:35:42 crc kubenswrapper[4576]: I1203 09:35:42.861700 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8867133-290c-475c-ac49-05cce90e3001" containerName="registry-server" Dec 03 09:35:42 crc kubenswrapper[4576]: I1203 09:35:42.861737 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="af426bee-00a4-4c61-be68-87719bd4f285" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 03 09:35:42 crc kubenswrapper[4576]: I1203 09:35:42.862511 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-649lx" Dec 03 09:35:42 crc kubenswrapper[4576]: I1203 09:35:42.865302 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 09:35:42 crc kubenswrapper[4576]: I1203 09:35:42.865432 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Dec 03 09:35:42 crc kubenswrapper[4576]: I1203 09:35:42.865302 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 09:35:42 crc kubenswrapper[4576]: I1203 09:35:42.868428 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Dec 03 09:35:42 crc kubenswrapper[4576]: I1203 09:35:42.868676 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 09:35:42 crc kubenswrapper[4576]: I1203 09:35:42.868982 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-sl8dp" Dec 03 09:35:42 crc kubenswrapper[4576]: I1203 09:35:42.869152 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Dec 03 09:35:42 crc kubenswrapper[4576]: I1203 09:35:42.873833 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-649lx"] Dec 03 09:35:43 crc kubenswrapper[4576]: I1203 09:35:43.046265 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7424b9a-5544-49a0-af69-fc3d308bf468-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-649lx\" (UID: \"c7424b9a-5544-49a0-af69-fc3d308bf468\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-649lx" Dec 03 09:35:43 crc kubenswrapper[4576]: I1203 09:35:43.046355 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c7424b9a-5544-49a0-af69-fc3d308bf468-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-649lx\" (UID: \"c7424b9a-5544-49a0-af69-fc3d308bf468\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-649lx" Dec 03 09:35:43 crc kubenswrapper[4576]: I1203 09:35:43.046406 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/c7424b9a-5544-49a0-af69-fc3d308bf468-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-649lx\" (UID: \"c7424b9a-5544-49a0-af69-fc3d308bf468\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-649lx" Dec 03 09:35:43 crc kubenswrapper[4576]: I1203 09:35:43.046488 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/c7424b9a-5544-49a0-af69-fc3d308bf468-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-649lx\" (UID: \"c7424b9a-5544-49a0-af69-fc3d308bf468\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-649lx" Dec 03 09:35:43 crc kubenswrapper[4576]: I1203 09:35:43.046505 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/c7424b9a-5544-49a0-af69-fc3d308bf468-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-649lx\" (UID: \"c7424b9a-5544-49a0-af69-fc3d308bf468\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-649lx" Dec 03 09:35:43 crc kubenswrapper[4576]: I1203 09:35:43.046596 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wtzhb\" (UniqueName: \"kubernetes.io/projected/c7424b9a-5544-49a0-af69-fc3d308bf468-kube-api-access-wtzhb\") pod \"nova-edpm-deployment-openstack-edpm-ipam-649lx\" (UID: \"c7424b9a-5544-49a0-af69-fc3d308bf468\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-649lx" Dec 03 09:35:43 crc kubenswrapper[4576]: I1203 09:35:43.046631 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/c7424b9a-5544-49a0-af69-fc3d308bf468-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-649lx\" (UID: \"c7424b9a-5544-49a0-af69-fc3d308bf468\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-649lx" Dec 03 09:35:43 crc kubenswrapper[4576]: I1203 09:35:43.046655 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/c7424b9a-5544-49a0-af69-fc3d308bf468-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-649lx\" (UID: \"c7424b9a-5544-49a0-af69-fc3d308bf468\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-649lx" Dec 03 09:35:43 crc kubenswrapper[4576]: I1203 09:35:43.046708 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/c7424b9a-5544-49a0-af69-fc3d308bf468-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-649lx\" (UID: \"c7424b9a-5544-49a0-af69-fc3d308bf468\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-649lx" Dec 03 09:35:43 crc kubenswrapper[4576]: I1203 09:35:43.149239 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/c7424b9a-5544-49a0-af69-fc3d308bf468-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-649lx\" (UID: \"c7424b9a-5544-49a0-af69-fc3d308bf468\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-649lx" Dec 03 09:35:43 crc kubenswrapper[4576]: I1203 09:35:43.149317 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c7424b9a-5544-49a0-af69-fc3d308bf468-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-649lx\" (UID: \"c7424b9a-5544-49a0-af69-fc3d308bf468\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-649lx" Dec 03 09:35:43 crc kubenswrapper[4576]: I1203 09:35:43.149423 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wtzhb\" (UniqueName: \"kubernetes.io/projected/c7424b9a-5544-49a0-af69-fc3d308bf468-kube-api-access-wtzhb\") pod \"nova-edpm-deployment-openstack-edpm-ipam-649lx\" (UID: \"c7424b9a-5544-49a0-af69-fc3d308bf468\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-649lx" Dec 03 09:35:43 crc kubenswrapper[4576]: I1203 09:35:43.149445 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" 
(UniqueName: \"kubernetes.io/secret/c7424b9a-5544-49a0-af69-fc3d308bf468-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-649lx\" (UID: \"c7424b9a-5544-49a0-af69-fc3d308bf468\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-649lx" Dec 03 09:35:43 crc kubenswrapper[4576]: I1203 09:35:43.149478 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/c7424b9a-5544-49a0-af69-fc3d308bf468-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-649lx\" (UID: \"c7424b9a-5544-49a0-af69-fc3d308bf468\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-649lx" Dec 03 09:35:43 crc kubenswrapper[4576]: I1203 09:35:43.149604 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/c7424b9a-5544-49a0-af69-fc3d308bf468-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-649lx\" (UID: \"c7424b9a-5544-49a0-af69-fc3d308bf468\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-649lx" Dec 03 09:35:43 crc kubenswrapper[4576]: I1203 09:35:43.149683 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7424b9a-5544-49a0-af69-fc3d308bf468-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-649lx\" (UID: \"c7424b9a-5544-49a0-af69-fc3d308bf468\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-649lx" Dec 03 09:35:43 crc kubenswrapper[4576]: I1203 09:35:43.149756 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c7424b9a-5544-49a0-af69-fc3d308bf468-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-649lx\" (UID: \"c7424b9a-5544-49a0-af69-fc3d308bf468\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-649lx" Dec 03 09:35:43 crc kubenswrapper[4576]: I1203 09:35:43.149816 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/c7424b9a-5544-49a0-af69-fc3d308bf468-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-649lx\" (UID: \"c7424b9a-5544-49a0-af69-fc3d308bf468\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-649lx" Dec 03 09:35:43 crc kubenswrapper[4576]: I1203 09:35:43.152935 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/c7424b9a-5544-49a0-af69-fc3d308bf468-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-649lx\" (UID: \"c7424b9a-5544-49a0-af69-fc3d308bf468\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-649lx" Dec 03 09:35:43 crc kubenswrapper[4576]: I1203 09:35:43.156113 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7424b9a-5544-49a0-af69-fc3d308bf468-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-649lx\" (UID: \"c7424b9a-5544-49a0-af69-fc3d308bf468\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-649lx" Dec 03 09:35:43 crc kubenswrapper[4576]: I1203 09:35:43.156718 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c7424b9a-5544-49a0-af69-fc3d308bf468-ssh-key\") pod 
\"nova-edpm-deployment-openstack-edpm-ipam-649lx\" (UID: \"c7424b9a-5544-49a0-af69-fc3d308bf468\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-649lx" Dec 03 09:35:43 crc kubenswrapper[4576]: I1203 09:35:43.157212 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/c7424b9a-5544-49a0-af69-fc3d308bf468-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-649lx\" (UID: \"c7424b9a-5544-49a0-af69-fc3d308bf468\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-649lx" Dec 03 09:35:43 crc kubenswrapper[4576]: I1203 09:35:43.157630 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/c7424b9a-5544-49a0-af69-fc3d308bf468-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-649lx\" (UID: \"c7424b9a-5544-49a0-af69-fc3d308bf468\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-649lx" Dec 03 09:35:43 crc kubenswrapper[4576]: I1203 09:35:43.161868 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c7424b9a-5544-49a0-af69-fc3d308bf468-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-649lx\" (UID: \"c7424b9a-5544-49a0-af69-fc3d308bf468\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-649lx" Dec 03 09:35:43 crc kubenswrapper[4576]: I1203 09:35:43.163146 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/c7424b9a-5544-49a0-af69-fc3d308bf468-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-649lx\" (UID: \"c7424b9a-5544-49a0-af69-fc3d308bf468\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-649lx" Dec 03 09:35:43 crc kubenswrapper[4576]: I1203 09:35:43.165954 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/c7424b9a-5544-49a0-af69-fc3d308bf468-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-649lx\" (UID: \"c7424b9a-5544-49a0-af69-fc3d308bf468\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-649lx" Dec 03 09:35:43 crc kubenswrapper[4576]: I1203 09:35:43.179391 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wtzhb\" (UniqueName: \"kubernetes.io/projected/c7424b9a-5544-49a0-af69-fc3d308bf468-kube-api-access-wtzhb\") pod \"nova-edpm-deployment-openstack-edpm-ipam-649lx\" (UID: \"c7424b9a-5544-49a0-af69-fc3d308bf468\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-649lx" Dec 03 09:35:43 crc kubenswrapper[4576]: I1203 09:35:43.185607 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-649lx" Dec 03 09:35:43 crc kubenswrapper[4576]: I1203 09:35:43.770629 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-649lx"] Dec 03 09:35:44 crc kubenswrapper[4576]: I1203 09:35:44.768700 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-649lx" event={"ID":"c7424b9a-5544-49a0-af69-fc3d308bf468","Type":"ContainerStarted","Data":"5e1efc51882df25743a5edb9ca10c6de57ebec8d9d88130ff043b99d5c883104"} Dec 03 09:35:44 crc kubenswrapper[4576]: I1203 09:35:44.769071 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-649lx" event={"ID":"c7424b9a-5544-49a0-af69-fc3d308bf468","Type":"ContainerStarted","Data":"a4a413a49a440f2790735593c862066b602472f48ed4ab5909ebdcab2e8de779"} Dec 03 09:35:44 crc kubenswrapper[4576]: I1203 09:35:44.797990 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-649lx" podStartSLOduration=2.635303098 podStartE2EDuration="2.79796411s" podCreationTimestamp="2025-12-03 09:35:42 +0000 UTC" firstStartedPulling="2025-12-03 09:35:43.760494622 +0000 UTC m=+3351.146471606" lastFinishedPulling="2025-12-03 09:35:43.923155634 +0000 UTC m=+3351.309132618" observedRunningTime="2025-12-03 09:35:44.78557028 +0000 UTC m=+3352.171547274" watchObservedRunningTime="2025-12-03 09:35:44.79796411 +0000 UTC m=+3352.183941094" Dec 03 09:35:52 crc kubenswrapper[4576]: I1203 09:35:52.628829 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/swift-proxy-b677c5dc5-pfc4n" podUID="6bf8d1cf-0003-4e48-89f5-7ae1698f27ff" containerName="proxy-server" probeResult="failure" output="HTTP probe failed with statuscode: 502" Dec 03 09:37:01 crc kubenswrapper[4576]: I1203 09:37:01.939105 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-qx4z4"] Dec 03 09:37:01 crc kubenswrapper[4576]: I1203 09:37:01.947558 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qx4z4" Dec 03 09:37:01 crc kubenswrapper[4576]: I1203 09:37:01.961174 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qx4z4"] Dec 03 09:37:02 crc kubenswrapper[4576]: I1203 09:37:02.001495 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e0fb795-aefa-4594-b6e7-8906f9a2d1e9-utilities\") pod \"redhat-marketplace-qx4z4\" (UID: \"2e0fb795-aefa-4594-b6e7-8906f9a2d1e9\") " pod="openshift-marketplace/redhat-marketplace-qx4z4" Dec 03 09:37:02 crc kubenswrapper[4576]: I1203 09:37:02.001581 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e0fb795-aefa-4594-b6e7-8906f9a2d1e9-catalog-content\") pod \"redhat-marketplace-qx4z4\" (UID: \"2e0fb795-aefa-4594-b6e7-8906f9a2d1e9\") " pod="openshift-marketplace/redhat-marketplace-qx4z4" Dec 03 09:37:02 crc kubenswrapper[4576]: I1203 09:37:02.001751 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2h5j\" (UniqueName: \"kubernetes.io/projected/2e0fb795-aefa-4594-b6e7-8906f9a2d1e9-kube-api-access-q2h5j\") pod \"redhat-marketplace-qx4z4\" (UID: \"2e0fb795-aefa-4594-b6e7-8906f9a2d1e9\") " pod="openshift-marketplace/redhat-marketplace-qx4z4" Dec 03 09:37:02 crc kubenswrapper[4576]: I1203 09:37:02.103276 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e0fb795-aefa-4594-b6e7-8906f9a2d1e9-catalog-content\") pod \"redhat-marketplace-qx4z4\" (UID: \"2e0fb795-aefa-4594-b6e7-8906f9a2d1e9\") " pod="openshift-marketplace/redhat-marketplace-qx4z4" Dec 03 09:37:02 crc kubenswrapper[4576]: I1203 09:37:02.103515 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2h5j\" (UniqueName: \"kubernetes.io/projected/2e0fb795-aefa-4594-b6e7-8906f9a2d1e9-kube-api-access-q2h5j\") pod \"redhat-marketplace-qx4z4\" (UID: \"2e0fb795-aefa-4594-b6e7-8906f9a2d1e9\") " pod="openshift-marketplace/redhat-marketplace-qx4z4" Dec 03 09:37:02 crc kubenswrapper[4576]: I1203 09:37:02.103604 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e0fb795-aefa-4594-b6e7-8906f9a2d1e9-utilities\") pod \"redhat-marketplace-qx4z4\" (UID: \"2e0fb795-aefa-4594-b6e7-8906f9a2d1e9\") " pod="openshift-marketplace/redhat-marketplace-qx4z4" Dec 03 09:37:02 crc kubenswrapper[4576]: I1203 09:37:02.103797 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e0fb795-aefa-4594-b6e7-8906f9a2d1e9-catalog-content\") pod \"redhat-marketplace-qx4z4\" (UID: \"2e0fb795-aefa-4594-b6e7-8906f9a2d1e9\") " pod="openshift-marketplace/redhat-marketplace-qx4z4" Dec 03 09:37:02 crc kubenswrapper[4576]: I1203 09:37:02.104184 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e0fb795-aefa-4594-b6e7-8906f9a2d1e9-utilities\") pod \"redhat-marketplace-qx4z4\" (UID: \"2e0fb795-aefa-4594-b6e7-8906f9a2d1e9\") " pod="openshift-marketplace/redhat-marketplace-qx4z4" Dec 03 09:37:02 crc kubenswrapper[4576]: I1203 09:37:02.124507 4576 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-q2h5j\" (UniqueName: \"kubernetes.io/projected/2e0fb795-aefa-4594-b6e7-8906f9a2d1e9-kube-api-access-q2h5j\") pod \"redhat-marketplace-qx4z4\" (UID: \"2e0fb795-aefa-4594-b6e7-8906f9a2d1e9\") " pod="openshift-marketplace/redhat-marketplace-qx4z4" Dec 03 09:37:02 crc kubenswrapper[4576]: I1203 09:37:02.265747 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qx4z4" Dec 03 09:37:02 crc kubenswrapper[4576]: I1203 09:37:02.890776 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qx4z4"] Dec 03 09:37:03 crc kubenswrapper[4576]: I1203 09:37:03.556412 4576 generic.go:334] "Generic (PLEG): container finished" podID="2e0fb795-aefa-4594-b6e7-8906f9a2d1e9" containerID="f2ba0fcb8d12fdf33c508d312e3ae6aadcc38bac2e12239325c1488aaaa67927" exitCode=0 Dec 03 09:37:03 crc kubenswrapper[4576]: I1203 09:37:03.556508 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qx4z4" event={"ID":"2e0fb795-aefa-4594-b6e7-8906f9a2d1e9","Type":"ContainerDied","Data":"f2ba0fcb8d12fdf33c508d312e3ae6aadcc38bac2e12239325c1488aaaa67927"} Dec 03 09:37:03 crc kubenswrapper[4576]: I1203 09:37:03.556763 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qx4z4" event={"ID":"2e0fb795-aefa-4594-b6e7-8906f9a2d1e9","Type":"ContainerStarted","Data":"df6f7a16f5380058c0de275c5231f1dd4621e15b063c80e19e97d5054653ac7f"} Dec 03 09:37:03 crc kubenswrapper[4576]: I1203 09:37:03.558417 4576 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 09:37:04 crc kubenswrapper[4576]: I1203 09:37:04.569335 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qx4z4" event={"ID":"2e0fb795-aefa-4594-b6e7-8906f9a2d1e9","Type":"ContainerStarted","Data":"92d6f08cd90a687e19741dc2f2d7e5ec34272bfe499f75ead32d9ad2671b5569"} Dec 03 09:37:05 crc kubenswrapper[4576]: I1203 09:37:05.580110 4576 generic.go:334] "Generic (PLEG): container finished" podID="2e0fb795-aefa-4594-b6e7-8906f9a2d1e9" containerID="92d6f08cd90a687e19741dc2f2d7e5ec34272bfe499f75ead32d9ad2671b5569" exitCode=0 Dec 03 09:37:05 crc kubenswrapper[4576]: I1203 09:37:05.580200 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qx4z4" event={"ID":"2e0fb795-aefa-4594-b6e7-8906f9a2d1e9","Type":"ContainerDied","Data":"92d6f08cd90a687e19741dc2f2d7e5ec34272bfe499f75ead32d9ad2671b5569"} Dec 03 09:37:06 crc kubenswrapper[4576]: I1203 09:37:06.598716 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qx4z4" event={"ID":"2e0fb795-aefa-4594-b6e7-8906f9a2d1e9","Type":"ContainerStarted","Data":"3f78e63aacde32f9b8a4e5e31f55d73922044bf6a08d0fba91cb82093db6966b"} Dec 03 09:37:06 crc kubenswrapper[4576]: I1203 09:37:06.625572 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-qx4z4" podStartSLOduration=3.122157238 podStartE2EDuration="5.625552996s" podCreationTimestamp="2025-12-03 09:37:01 +0000 UTC" firstStartedPulling="2025-12-03 09:37:03.55805152 +0000 UTC m=+3430.944028494" lastFinishedPulling="2025-12-03 09:37:06.061447268 +0000 UTC m=+3433.447424252" observedRunningTime="2025-12-03 09:37:06.616746939 +0000 UTC m=+3434.002723923" watchObservedRunningTime="2025-12-03 09:37:06.625552996 +0000 UTC 
m=+3434.011529980" Dec 03 09:37:12 crc kubenswrapper[4576]: I1203 09:37:12.267052 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-qx4z4" Dec 03 09:37:12 crc kubenswrapper[4576]: I1203 09:37:12.267762 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-qx4z4" Dec 03 09:37:12 crc kubenswrapper[4576]: I1203 09:37:12.326505 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-qx4z4" Dec 03 09:37:12 crc kubenswrapper[4576]: I1203 09:37:12.762279 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-qx4z4" Dec 03 09:37:12 crc kubenswrapper[4576]: I1203 09:37:12.804803 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qx4z4"] Dec 03 09:37:14 crc kubenswrapper[4576]: I1203 09:37:14.738172 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-qx4z4" podUID="2e0fb795-aefa-4594-b6e7-8906f9a2d1e9" containerName="registry-server" containerID="cri-o://3f78e63aacde32f9b8a4e5e31f55d73922044bf6a08d0fba91cb82093db6966b" gracePeriod=2 Dec 03 09:37:15 crc kubenswrapper[4576]: I1203 09:37:15.241472 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qx4z4" Dec 03 09:37:15 crc kubenswrapper[4576]: I1203 09:37:15.287731 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e0fb795-aefa-4594-b6e7-8906f9a2d1e9-utilities\") pod \"2e0fb795-aefa-4594-b6e7-8906f9a2d1e9\" (UID: \"2e0fb795-aefa-4594-b6e7-8906f9a2d1e9\") " Dec 03 09:37:15 crc kubenswrapper[4576]: I1203 09:37:15.288081 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q2h5j\" (UniqueName: \"kubernetes.io/projected/2e0fb795-aefa-4594-b6e7-8906f9a2d1e9-kube-api-access-q2h5j\") pod \"2e0fb795-aefa-4594-b6e7-8906f9a2d1e9\" (UID: \"2e0fb795-aefa-4594-b6e7-8906f9a2d1e9\") " Dec 03 09:37:15 crc kubenswrapper[4576]: I1203 09:37:15.288272 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e0fb795-aefa-4594-b6e7-8906f9a2d1e9-catalog-content\") pod \"2e0fb795-aefa-4594-b6e7-8906f9a2d1e9\" (UID: \"2e0fb795-aefa-4594-b6e7-8906f9a2d1e9\") " Dec 03 09:37:15 crc kubenswrapper[4576]: I1203 09:37:15.288708 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2e0fb795-aefa-4594-b6e7-8906f9a2d1e9-utilities" (OuterVolumeSpecName: "utilities") pod "2e0fb795-aefa-4594-b6e7-8906f9a2d1e9" (UID: "2e0fb795-aefa-4594-b6e7-8906f9a2d1e9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:37:15 crc kubenswrapper[4576]: I1203 09:37:15.305719 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e0fb795-aefa-4594-b6e7-8906f9a2d1e9-kube-api-access-q2h5j" (OuterVolumeSpecName: "kube-api-access-q2h5j") pod "2e0fb795-aefa-4594-b6e7-8906f9a2d1e9" (UID: "2e0fb795-aefa-4594-b6e7-8906f9a2d1e9"). InnerVolumeSpecName "kube-api-access-q2h5j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:37:15 crc kubenswrapper[4576]: I1203 09:37:15.317562 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2e0fb795-aefa-4594-b6e7-8906f9a2d1e9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2e0fb795-aefa-4594-b6e7-8906f9a2d1e9" (UID: "2e0fb795-aefa-4594-b6e7-8906f9a2d1e9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:37:15 crc kubenswrapper[4576]: I1203 09:37:15.390257 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e0fb795-aefa-4594-b6e7-8906f9a2d1e9-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 09:37:15 crc kubenswrapper[4576]: I1203 09:37:15.390291 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q2h5j\" (UniqueName: \"kubernetes.io/projected/2e0fb795-aefa-4594-b6e7-8906f9a2d1e9-kube-api-access-q2h5j\") on node \"crc\" DevicePath \"\"" Dec 03 09:37:15 crc kubenswrapper[4576]: I1203 09:37:15.390302 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e0fb795-aefa-4594-b6e7-8906f9a2d1e9-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 09:37:15 crc kubenswrapper[4576]: I1203 09:37:15.751029 4576 generic.go:334] "Generic (PLEG): container finished" podID="2e0fb795-aefa-4594-b6e7-8906f9a2d1e9" containerID="3f78e63aacde32f9b8a4e5e31f55d73922044bf6a08d0fba91cb82093db6966b" exitCode=0 Dec 03 09:37:15 crc kubenswrapper[4576]: I1203 09:37:15.751144 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qx4z4" event={"ID":"2e0fb795-aefa-4594-b6e7-8906f9a2d1e9","Type":"ContainerDied","Data":"3f78e63aacde32f9b8a4e5e31f55d73922044bf6a08d0fba91cb82093db6966b"} Dec 03 09:37:15 crc kubenswrapper[4576]: I1203 09:37:15.751367 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qx4z4" event={"ID":"2e0fb795-aefa-4594-b6e7-8906f9a2d1e9","Type":"ContainerDied","Data":"df6f7a16f5380058c0de275c5231f1dd4621e15b063c80e19e97d5054653ac7f"} Dec 03 09:37:15 crc kubenswrapper[4576]: I1203 09:37:15.751408 4576 scope.go:117] "RemoveContainer" containerID="3f78e63aacde32f9b8a4e5e31f55d73922044bf6a08d0fba91cb82093db6966b" Dec 03 09:37:15 crc kubenswrapper[4576]: I1203 09:37:15.751186 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qx4z4" Dec 03 09:37:15 crc kubenswrapper[4576]: I1203 09:37:15.785932 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qx4z4"] Dec 03 09:37:15 crc kubenswrapper[4576]: I1203 09:37:15.789996 4576 scope.go:117] "RemoveContainer" containerID="92d6f08cd90a687e19741dc2f2d7e5ec34272bfe499f75ead32d9ad2671b5569" Dec 03 09:37:15 crc kubenswrapper[4576]: I1203 09:37:15.795437 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-qx4z4"] Dec 03 09:37:15 crc kubenswrapper[4576]: I1203 09:37:15.810354 4576 scope.go:117] "RemoveContainer" containerID="f2ba0fcb8d12fdf33c508d312e3ae6aadcc38bac2e12239325c1488aaaa67927" Dec 03 09:37:15 crc kubenswrapper[4576]: I1203 09:37:15.859812 4576 scope.go:117] "RemoveContainer" containerID="3f78e63aacde32f9b8a4e5e31f55d73922044bf6a08d0fba91cb82093db6966b" Dec 03 09:37:15 crc kubenswrapper[4576]: E1203 09:37:15.860349 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3f78e63aacde32f9b8a4e5e31f55d73922044bf6a08d0fba91cb82093db6966b\": container with ID starting with 3f78e63aacde32f9b8a4e5e31f55d73922044bf6a08d0fba91cb82093db6966b not found: ID does not exist" containerID="3f78e63aacde32f9b8a4e5e31f55d73922044bf6a08d0fba91cb82093db6966b" Dec 03 09:37:15 crc kubenswrapper[4576]: I1203 09:37:15.860381 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f78e63aacde32f9b8a4e5e31f55d73922044bf6a08d0fba91cb82093db6966b"} err="failed to get container status \"3f78e63aacde32f9b8a4e5e31f55d73922044bf6a08d0fba91cb82093db6966b\": rpc error: code = NotFound desc = could not find container \"3f78e63aacde32f9b8a4e5e31f55d73922044bf6a08d0fba91cb82093db6966b\": container with ID starting with 3f78e63aacde32f9b8a4e5e31f55d73922044bf6a08d0fba91cb82093db6966b not found: ID does not exist" Dec 03 09:37:15 crc kubenswrapper[4576]: I1203 09:37:15.860404 4576 scope.go:117] "RemoveContainer" containerID="92d6f08cd90a687e19741dc2f2d7e5ec34272bfe499f75ead32d9ad2671b5569" Dec 03 09:37:15 crc kubenswrapper[4576]: E1203 09:37:15.860848 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"92d6f08cd90a687e19741dc2f2d7e5ec34272bfe499f75ead32d9ad2671b5569\": container with ID starting with 92d6f08cd90a687e19741dc2f2d7e5ec34272bfe499f75ead32d9ad2671b5569 not found: ID does not exist" containerID="92d6f08cd90a687e19741dc2f2d7e5ec34272bfe499f75ead32d9ad2671b5569" Dec 03 09:37:15 crc kubenswrapper[4576]: I1203 09:37:15.860873 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"92d6f08cd90a687e19741dc2f2d7e5ec34272bfe499f75ead32d9ad2671b5569"} err="failed to get container status \"92d6f08cd90a687e19741dc2f2d7e5ec34272bfe499f75ead32d9ad2671b5569\": rpc error: code = NotFound desc = could not find container \"92d6f08cd90a687e19741dc2f2d7e5ec34272bfe499f75ead32d9ad2671b5569\": container with ID starting with 92d6f08cd90a687e19741dc2f2d7e5ec34272bfe499f75ead32d9ad2671b5569 not found: ID does not exist" Dec 03 09:37:15 crc kubenswrapper[4576]: I1203 09:37:15.860892 4576 scope.go:117] "RemoveContainer" containerID="f2ba0fcb8d12fdf33c508d312e3ae6aadcc38bac2e12239325c1488aaaa67927" Dec 03 09:37:15 crc kubenswrapper[4576]: E1203 09:37:15.861162 4576 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"f2ba0fcb8d12fdf33c508d312e3ae6aadcc38bac2e12239325c1488aaaa67927\": container with ID starting with f2ba0fcb8d12fdf33c508d312e3ae6aadcc38bac2e12239325c1488aaaa67927 not found: ID does not exist" containerID="f2ba0fcb8d12fdf33c508d312e3ae6aadcc38bac2e12239325c1488aaaa67927" Dec 03 09:37:15 crc kubenswrapper[4576]: I1203 09:37:15.861186 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2ba0fcb8d12fdf33c508d312e3ae6aadcc38bac2e12239325c1488aaaa67927"} err="failed to get container status \"f2ba0fcb8d12fdf33c508d312e3ae6aadcc38bac2e12239325c1488aaaa67927\": rpc error: code = NotFound desc = could not find container \"f2ba0fcb8d12fdf33c508d312e3ae6aadcc38bac2e12239325c1488aaaa67927\": container with ID starting with f2ba0fcb8d12fdf33c508d312e3ae6aadcc38bac2e12239325c1488aaaa67927 not found: ID does not exist" Dec 03 09:37:17 crc kubenswrapper[4576]: I1203 09:37:17.689256 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2e0fb795-aefa-4594-b6e7-8906f9a2d1e9" path="/var/lib/kubelet/pods/2e0fb795-aefa-4594-b6e7-8906f9a2d1e9/volumes" Dec 03 09:37:39 crc kubenswrapper[4576]: I1203 09:37:39.681146 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 09:37:39 crc kubenswrapper[4576]: I1203 09:37:39.681708 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 09:38:09 crc kubenswrapper[4576]: I1203 09:38:09.681389 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 09:38:09 crc kubenswrapper[4576]: I1203 09:38:09.681891 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 09:38:32 crc kubenswrapper[4576]: I1203 09:38:31.609228 4576 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-nsggq" podUID="649283f6-ebcd-45a0-974f-e9c14138fa46" containerName="manager" probeResult="failure" output="Get \"http://10.217.0.71:8081/readyz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 09:38:32 crc kubenswrapper[4576]: I1203 09:38:31.652030 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-nsggq" podUID="649283f6-ebcd-45a0-974f-e9c14138fa46" containerName="manager" probeResult="failure" output="Get \"http://10.217.0.71:8081/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 09:38:39 crc kubenswrapper[4576]: 
I1203 09:38:39.681114 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 09:38:39 crc kubenswrapper[4576]: I1203 09:38:39.681738 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 09:38:39 crc kubenswrapper[4576]: I1203 09:38:39.688706 4576 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" Dec 03 09:38:39 crc kubenswrapper[4576]: I1203 09:38:39.689574 4576 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"707ccaf0ef5f353850073889c61e083c71eff648ae35958f1c9dff2475427c85"} pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 09:38:39 crc kubenswrapper[4576]: I1203 09:38:39.689647 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" containerID="cri-o://707ccaf0ef5f353850073889c61e083c71eff648ae35958f1c9dff2475427c85" gracePeriod=600 Dec 03 09:38:40 crc kubenswrapper[4576]: I1203 09:38:40.141135 4576 generic.go:334] "Generic (PLEG): container finished" podID="60b1bede-26e9-4b5d-b450-9866da685693" containerID="707ccaf0ef5f353850073889c61e083c71eff648ae35958f1c9dff2475427c85" exitCode=0 Dec 03 09:38:40 crc kubenswrapper[4576]: I1203 09:38:40.141222 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" event={"ID":"60b1bede-26e9-4b5d-b450-9866da685693","Type":"ContainerDied","Data":"707ccaf0ef5f353850073889c61e083c71eff648ae35958f1c9dff2475427c85"} Dec 03 09:38:40 crc kubenswrapper[4576]: I1203 09:38:40.141662 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" event={"ID":"60b1bede-26e9-4b5d-b450-9866da685693","Type":"ContainerStarted","Data":"203ce6e7360eeb498dd9ac293ee7cc7c5ecc0d5e931b63f02d758470199459d2"} Dec 03 09:38:40 crc kubenswrapper[4576]: I1203 09:38:40.141687 4576 scope.go:117] "RemoveContainer" containerID="51c850a542b7285323ba43ef888f5d2233dbe9ae9ee471959ff54bc0da4a11c1" Dec 03 09:38:48 crc kubenswrapper[4576]: I1203 09:38:48.225946 4576 generic.go:334] "Generic (PLEG): container finished" podID="c7424b9a-5544-49a0-af69-fc3d308bf468" containerID="5e1efc51882df25743a5edb9ca10c6de57ebec8d9d88130ff043b99d5c883104" exitCode=0 Dec 03 09:38:48 crc kubenswrapper[4576]: I1203 09:38:48.225997 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-649lx" event={"ID":"c7424b9a-5544-49a0-af69-fc3d308bf468","Type":"ContainerDied","Data":"5e1efc51882df25743a5edb9ca10c6de57ebec8d9d88130ff043b99d5c883104"} Dec 03 09:38:48 crc kubenswrapper[4576]: I1203 09:38:48.892900 4576 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/certified-operators-6s2fr"] Dec 03 09:38:48 crc kubenswrapper[4576]: E1203 09:38:48.893735 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e0fb795-aefa-4594-b6e7-8906f9a2d1e9" containerName="extract-content" Dec 03 09:38:48 crc kubenswrapper[4576]: I1203 09:38:48.893760 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e0fb795-aefa-4594-b6e7-8906f9a2d1e9" containerName="extract-content" Dec 03 09:38:48 crc kubenswrapper[4576]: E1203 09:38:48.893803 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e0fb795-aefa-4594-b6e7-8906f9a2d1e9" containerName="extract-utilities" Dec 03 09:38:48 crc kubenswrapper[4576]: I1203 09:38:48.893811 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e0fb795-aefa-4594-b6e7-8906f9a2d1e9" containerName="extract-utilities" Dec 03 09:38:48 crc kubenswrapper[4576]: E1203 09:38:48.893830 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e0fb795-aefa-4594-b6e7-8906f9a2d1e9" containerName="registry-server" Dec 03 09:38:48 crc kubenswrapper[4576]: I1203 09:38:48.893838 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e0fb795-aefa-4594-b6e7-8906f9a2d1e9" containerName="registry-server" Dec 03 09:38:48 crc kubenswrapper[4576]: I1203 09:38:48.894078 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e0fb795-aefa-4594-b6e7-8906f9a2d1e9" containerName="registry-server" Dec 03 09:38:48 crc kubenswrapper[4576]: I1203 09:38:48.896198 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6s2fr" Dec 03 09:38:48 crc kubenswrapper[4576]: I1203 09:38:48.905936 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6s2fr"] Dec 03 09:38:48 crc kubenswrapper[4576]: I1203 09:38:48.961154 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/700fb063-7b50-4aa8-8653-2f77be2b9f4e-catalog-content\") pod \"certified-operators-6s2fr\" (UID: \"700fb063-7b50-4aa8-8653-2f77be2b9f4e\") " pod="openshift-marketplace/certified-operators-6s2fr" Dec 03 09:38:48 crc kubenswrapper[4576]: I1203 09:38:48.961367 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/700fb063-7b50-4aa8-8653-2f77be2b9f4e-utilities\") pod \"certified-operators-6s2fr\" (UID: \"700fb063-7b50-4aa8-8653-2f77be2b9f4e\") " pod="openshift-marketplace/certified-operators-6s2fr" Dec 03 09:38:48 crc kubenswrapper[4576]: I1203 09:38:48.961407 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxwdj\" (UniqueName: \"kubernetes.io/projected/700fb063-7b50-4aa8-8653-2f77be2b9f4e-kube-api-access-kxwdj\") pod \"certified-operators-6s2fr\" (UID: \"700fb063-7b50-4aa8-8653-2f77be2b9f4e\") " pod="openshift-marketplace/certified-operators-6s2fr" Dec 03 09:38:49 crc kubenswrapper[4576]: I1203 09:38:49.063229 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/700fb063-7b50-4aa8-8653-2f77be2b9f4e-catalog-content\") pod \"certified-operators-6s2fr\" (UID: \"700fb063-7b50-4aa8-8653-2f77be2b9f4e\") " pod="openshift-marketplace/certified-operators-6s2fr" Dec 03 09:38:49 crc kubenswrapper[4576]: I1203 09:38:49.063791 4576 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/700fb063-7b50-4aa8-8653-2f77be2b9f4e-catalog-content\") pod \"certified-operators-6s2fr\" (UID: \"700fb063-7b50-4aa8-8653-2f77be2b9f4e\") " pod="openshift-marketplace/certified-operators-6s2fr" Dec 03 09:38:49 crc kubenswrapper[4576]: I1203 09:38:49.063978 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/700fb063-7b50-4aa8-8653-2f77be2b9f4e-utilities\") pod \"certified-operators-6s2fr\" (UID: \"700fb063-7b50-4aa8-8653-2f77be2b9f4e\") " pod="openshift-marketplace/certified-operators-6s2fr" Dec 03 09:38:49 crc kubenswrapper[4576]: I1203 09:38:49.064092 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxwdj\" (UniqueName: \"kubernetes.io/projected/700fb063-7b50-4aa8-8653-2f77be2b9f4e-kube-api-access-kxwdj\") pod \"certified-operators-6s2fr\" (UID: \"700fb063-7b50-4aa8-8653-2f77be2b9f4e\") " pod="openshift-marketplace/certified-operators-6s2fr" Dec 03 09:38:49 crc kubenswrapper[4576]: I1203 09:38:49.064617 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/700fb063-7b50-4aa8-8653-2f77be2b9f4e-utilities\") pod \"certified-operators-6s2fr\" (UID: \"700fb063-7b50-4aa8-8653-2f77be2b9f4e\") " pod="openshift-marketplace/certified-operators-6s2fr" Dec 03 09:38:49 crc kubenswrapper[4576]: I1203 09:38:49.088687 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxwdj\" (UniqueName: \"kubernetes.io/projected/700fb063-7b50-4aa8-8653-2f77be2b9f4e-kube-api-access-kxwdj\") pod \"certified-operators-6s2fr\" (UID: \"700fb063-7b50-4aa8-8653-2f77be2b9f4e\") " pod="openshift-marketplace/certified-operators-6s2fr" Dec 03 09:38:49 crc kubenswrapper[4576]: I1203 09:38:49.218856 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6s2fr" Dec 03 09:38:49 crc kubenswrapper[4576]: I1203 09:38:49.893360 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6s2fr"] Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.054270 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-649lx" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.092289 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c7424b9a-5544-49a0-af69-fc3d308bf468-inventory\") pod \"c7424b9a-5544-49a0-af69-fc3d308bf468\" (UID: \"c7424b9a-5544-49a0-af69-fc3d308bf468\") " Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.092414 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c7424b9a-5544-49a0-af69-fc3d308bf468-ssh-key\") pod \"c7424b9a-5544-49a0-af69-fc3d308bf468\" (UID: \"c7424b9a-5544-49a0-af69-fc3d308bf468\") " Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.092553 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wtzhb\" (UniqueName: \"kubernetes.io/projected/c7424b9a-5544-49a0-af69-fc3d308bf468-kube-api-access-wtzhb\") pod \"c7424b9a-5544-49a0-af69-fc3d308bf468\" (UID: \"c7424b9a-5544-49a0-af69-fc3d308bf468\") " Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.092636 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/c7424b9a-5544-49a0-af69-fc3d308bf468-nova-extra-config-0\") pod \"c7424b9a-5544-49a0-af69-fc3d308bf468\" (UID: \"c7424b9a-5544-49a0-af69-fc3d308bf468\") " Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.092669 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7424b9a-5544-49a0-af69-fc3d308bf468-nova-combined-ca-bundle\") pod \"c7424b9a-5544-49a0-af69-fc3d308bf468\" (UID: \"c7424b9a-5544-49a0-af69-fc3d308bf468\") " Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.092703 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/c7424b9a-5544-49a0-af69-fc3d308bf468-nova-cell1-compute-config-0\") pod \"c7424b9a-5544-49a0-af69-fc3d308bf468\" (UID: \"c7424b9a-5544-49a0-af69-fc3d308bf468\") " Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.092757 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/c7424b9a-5544-49a0-af69-fc3d308bf468-nova-cell1-compute-config-1\") pod \"c7424b9a-5544-49a0-af69-fc3d308bf468\" (UID: \"c7424b9a-5544-49a0-af69-fc3d308bf468\") " Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.092778 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/c7424b9a-5544-49a0-af69-fc3d308bf468-nova-migration-ssh-key-1\") pod \"c7424b9a-5544-49a0-af69-fc3d308bf468\" (UID: \"c7424b9a-5544-49a0-af69-fc3d308bf468\") " Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.112212 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7424b9a-5544-49a0-af69-fc3d308bf468-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "c7424b9a-5544-49a0-af69-fc3d308bf468" (UID: "c7424b9a-5544-49a0-af69-fc3d308bf468"). InnerVolumeSpecName "nova-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.113178 4576 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7424b9a-5544-49a0-af69-fc3d308bf468-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.117186 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c7424b9a-5544-49a0-af69-fc3d308bf468-kube-api-access-wtzhb" (OuterVolumeSpecName: "kube-api-access-wtzhb") pod "c7424b9a-5544-49a0-af69-fc3d308bf468" (UID: "c7424b9a-5544-49a0-af69-fc3d308bf468"). InnerVolumeSpecName "kube-api-access-wtzhb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.167926 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7424b9a-5544-49a0-af69-fc3d308bf468-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "c7424b9a-5544-49a0-af69-fc3d308bf468" (UID: "c7424b9a-5544-49a0-af69-fc3d308bf468"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.170276 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7424b9a-5544-49a0-af69-fc3d308bf468-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "c7424b9a-5544-49a0-af69-fc3d308bf468" (UID: "c7424b9a-5544-49a0-af69-fc3d308bf468"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.182632 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c7424b9a-5544-49a0-af69-fc3d308bf468-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "c7424b9a-5544-49a0-af69-fc3d308bf468" (UID: "c7424b9a-5544-49a0-af69-fc3d308bf468"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.189298 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7424b9a-5544-49a0-af69-fc3d308bf468-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c7424b9a-5544-49a0-af69-fc3d308bf468" (UID: "c7424b9a-5544-49a0-af69-fc3d308bf468"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.191085 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7424b9a-5544-49a0-af69-fc3d308bf468-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "c7424b9a-5544-49a0-af69-fc3d308bf468" (UID: "c7424b9a-5544-49a0-af69-fc3d308bf468"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.200813 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7424b9a-5544-49a0-af69-fc3d308bf468-inventory" (OuterVolumeSpecName: "inventory") pod "c7424b9a-5544-49a0-af69-fc3d308bf468" (UID: "c7424b9a-5544-49a0-af69-fc3d308bf468"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.214675 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/c7424b9a-5544-49a0-af69-fc3d308bf468-nova-migration-ssh-key-0\") pod \"c7424b9a-5544-49a0-af69-fc3d308bf468\" (UID: \"c7424b9a-5544-49a0-af69-fc3d308bf468\") " Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.215150 4576 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/c7424b9a-5544-49a0-af69-fc3d308bf468-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.215168 4576 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/c7424b9a-5544-49a0-af69-fc3d308bf468-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.215177 4576 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c7424b9a-5544-49a0-af69-fc3d308bf468-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.215186 4576 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c7424b9a-5544-49a0-af69-fc3d308bf468-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.215195 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wtzhb\" (UniqueName: \"kubernetes.io/projected/c7424b9a-5544-49a0-af69-fc3d308bf468-kube-api-access-wtzhb\") on node \"crc\" DevicePath \"\"" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.215204 4576 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/c7424b9a-5544-49a0-af69-fc3d308bf468-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.215213 4576 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/c7424b9a-5544-49a0-af69-fc3d308bf468-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.249190 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7424b9a-5544-49a0-af69-fc3d308bf468-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "c7424b9a-5544-49a0-af69-fc3d308bf468" (UID: "c7424b9a-5544-49a0-af69-fc3d308bf468"). InnerVolumeSpecName "nova-migration-ssh-key-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.264235 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6s2fr" event={"ID":"700fb063-7b50-4aa8-8653-2f77be2b9f4e","Type":"ContainerStarted","Data":"1339b78f7df5ec0f17d633294334461ac5b77030253fc7b5f0abc278e474764a"} Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.267879 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-649lx" event={"ID":"c7424b9a-5544-49a0-af69-fc3d308bf468","Type":"ContainerDied","Data":"a4a413a49a440f2790735593c862066b602472f48ed4ab5909ebdcab2e8de779"} Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.268073 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a4a413a49a440f2790735593c862066b602472f48ed4ab5909ebdcab2e8de779" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.268264 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-649lx" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.317075 4576 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/c7424b9a-5544-49a0-af69-fc3d308bf468-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.370634 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm"] Dec 03 09:38:50 crc kubenswrapper[4576]: E1203 09:38:50.371120 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7424b9a-5544-49a0-af69-fc3d308bf468" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.371146 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7424b9a-5544-49a0-af69-fc3d308bf468" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.371398 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7424b9a-5544-49a0-af69-fc3d308bf468" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.372203 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.374510 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.374510 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.376794 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.377119 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.377134 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-sl8dp" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.389946 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm"] Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.418711 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f432497e-88f4-424f-beb0-856c58fb586d-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm\" (UID: \"f432497e-88f4-424f-beb0-856c58fb586d\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.418801 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/f432497e-88f4-424f-beb0-856c58fb586d-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm\" (UID: \"f432497e-88f4-424f-beb0-856c58fb586d\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.418894 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bxdld\" (UniqueName: \"kubernetes.io/projected/f432497e-88f4-424f-beb0-856c58fb586d-kube-api-access-bxdld\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm\" (UID: \"f432497e-88f4-424f-beb0-856c58fb586d\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.418931 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/f432497e-88f4-424f-beb0-856c58fb586d-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm\" (UID: \"f432497e-88f4-424f-beb0-856c58fb586d\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.418964 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f432497e-88f4-424f-beb0-856c58fb586d-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm\" (UID: \"f432497e-88f4-424f-beb0-856c58fb586d\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 
09:38:50.419000 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f432497e-88f4-424f-beb0-856c58fb586d-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm\" (UID: \"f432497e-88f4-424f-beb0-856c58fb586d\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.419070 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/f432497e-88f4-424f-beb0-856c58fb586d-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm\" (UID: \"f432497e-88f4-424f-beb0-856c58fb586d\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.520674 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f432497e-88f4-424f-beb0-856c58fb586d-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm\" (UID: \"f432497e-88f4-424f-beb0-856c58fb586d\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.521035 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/f432497e-88f4-424f-beb0-856c58fb586d-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm\" (UID: \"f432497e-88f4-424f-beb0-856c58fb586d\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.521090 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bxdld\" (UniqueName: \"kubernetes.io/projected/f432497e-88f4-424f-beb0-856c58fb586d-kube-api-access-bxdld\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm\" (UID: \"f432497e-88f4-424f-beb0-856c58fb586d\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.521112 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/f432497e-88f4-424f-beb0-856c58fb586d-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm\" (UID: \"f432497e-88f4-424f-beb0-856c58fb586d\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.521133 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f432497e-88f4-424f-beb0-856c58fb586d-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm\" (UID: \"f432497e-88f4-424f-beb0-856c58fb586d\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.521156 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f432497e-88f4-424f-beb0-856c58fb586d-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm\" (UID: \"f432497e-88f4-424f-beb0-856c58fb586d\") " 
pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.521206 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/f432497e-88f4-424f-beb0-856c58fb586d-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm\" (UID: \"f432497e-88f4-424f-beb0-856c58fb586d\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.526390 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f432497e-88f4-424f-beb0-856c58fb586d-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm\" (UID: \"f432497e-88f4-424f-beb0-856c58fb586d\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.528558 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f432497e-88f4-424f-beb0-856c58fb586d-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm\" (UID: \"f432497e-88f4-424f-beb0-856c58fb586d\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.530055 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/f432497e-88f4-424f-beb0-856c58fb586d-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm\" (UID: \"f432497e-88f4-424f-beb0-856c58fb586d\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.531125 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/f432497e-88f4-424f-beb0-856c58fb586d-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm\" (UID: \"f432497e-88f4-424f-beb0-856c58fb586d\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.531939 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/f432497e-88f4-424f-beb0-856c58fb586d-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm\" (UID: \"f432497e-88f4-424f-beb0-856c58fb586d\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.538085 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f432497e-88f4-424f-beb0-856c58fb586d-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm\" (UID: \"f432497e-88f4-424f-beb0-856c58fb586d\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.544177 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bxdld\" (UniqueName: \"kubernetes.io/projected/f432497e-88f4-424f-beb0-856c58fb586d-kube-api-access-bxdld\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm\" (UID: 
\"f432497e-88f4-424f-beb0-856c58fb586d\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm" Dec 03 09:38:50 crc kubenswrapper[4576]: I1203 09:38:50.692591 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm" Dec 03 09:38:51 crc kubenswrapper[4576]: I1203 09:38:51.229646 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm"] Dec 03 09:38:51 crc kubenswrapper[4576]: I1203 09:38:51.276229 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm" event={"ID":"f432497e-88f4-424f-beb0-856c58fb586d","Type":"ContainerStarted","Data":"d99627ccce41f36b56728f4c0b307cae252d6d7e94448deead8abd6c0d6266c2"} Dec 03 09:38:51 crc kubenswrapper[4576]: I1203 09:38:51.277552 4576 generic.go:334] "Generic (PLEG): container finished" podID="700fb063-7b50-4aa8-8653-2f77be2b9f4e" containerID="2d110674f3c4022e49d8ee5d37c4549b9b6d6360a6efa4c15183d15d45064de2" exitCode=0 Dec 03 09:38:51 crc kubenswrapper[4576]: I1203 09:38:51.277593 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6s2fr" event={"ID":"700fb063-7b50-4aa8-8653-2f77be2b9f4e","Type":"ContainerDied","Data":"2d110674f3c4022e49d8ee5d37c4549b9b6d6360a6efa4c15183d15d45064de2"} Dec 03 09:38:52 crc kubenswrapper[4576]: I1203 09:38:52.287043 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm" event={"ID":"f432497e-88f4-424f-beb0-856c58fb586d","Type":"ContainerStarted","Data":"48b1b84d581eb71d942839d95e8fbcb981b4353783b1014608e5972aa1ad2a31"} Dec 03 09:38:52 crc kubenswrapper[4576]: I1203 09:38:52.292217 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6s2fr" event={"ID":"700fb063-7b50-4aa8-8653-2f77be2b9f4e","Type":"ContainerStarted","Data":"7621bb7f45f587ec38eaf3dd2fcb48cc5fbeefebf4c80d30007109d39c91c88d"} Dec 03 09:38:52 crc kubenswrapper[4576]: I1203 09:38:52.323398 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm" podStartSLOduration=2.169854793 podStartE2EDuration="2.32337478s" podCreationTimestamp="2025-12-03 09:38:50 +0000 UTC" firstStartedPulling="2025-12-03 09:38:51.249791671 +0000 UTC m=+3538.635768655" lastFinishedPulling="2025-12-03 09:38:51.403311658 +0000 UTC m=+3538.789288642" observedRunningTime="2025-12-03 09:38:52.309995766 +0000 UTC m=+3539.695972790" watchObservedRunningTime="2025-12-03 09:38:52.32337478 +0000 UTC m=+3539.709351764" Dec 03 09:38:53 crc kubenswrapper[4576]: I1203 09:38:53.302228 4576 generic.go:334] "Generic (PLEG): container finished" podID="700fb063-7b50-4aa8-8653-2f77be2b9f4e" containerID="7621bb7f45f587ec38eaf3dd2fcb48cc5fbeefebf4c80d30007109d39c91c88d" exitCode=0 Dec 03 09:38:53 crc kubenswrapper[4576]: I1203 09:38:53.302349 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6s2fr" event={"ID":"700fb063-7b50-4aa8-8653-2f77be2b9f4e","Type":"ContainerDied","Data":"7621bb7f45f587ec38eaf3dd2fcb48cc5fbeefebf4c80d30007109d39c91c88d"} Dec 03 09:38:54 crc kubenswrapper[4576]: I1203 09:38:54.313866 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6s2fr" 
event={"ID":"700fb063-7b50-4aa8-8653-2f77be2b9f4e","Type":"ContainerStarted","Data":"fc7dea11b03580b538d101e13c4f8397f38036d2bdc5867e217da379ba7d98c4"} Dec 03 09:38:54 crc kubenswrapper[4576]: I1203 09:38:54.350907 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-6s2fr" podStartSLOduration=3.925961028 podStartE2EDuration="6.350888454s" podCreationTimestamp="2025-12-03 09:38:48 +0000 UTC" firstStartedPulling="2025-12-03 09:38:51.279015175 +0000 UTC m=+3538.664992159" lastFinishedPulling="2025-12-03 09:38:53.703942591 +0000 UTC m=+3541.089919585" observedRunningTime="2025-12-03 09:38:54.340940558 +0000 UTC m=+3541.726917542" watchObservedRunningTime="2025-12-03 09:38:54.350888454 +0000 UTC m=+3541.736865438" Dec 03 09:38:59 crc kubenswrapper[4576]: I1203 09:38:59.219205 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-6s2fr" Dec 03 09:38:59 crc kubenswrapper[4576]: I1203 09:38:59.219842 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-6s2fr" Dec 03 09:38:59 crc kubenswrapper[4576]: I1203 09:38:59.285902 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-6s2fr" Dec 03 09:38:59 crc kubenswrapper[4576]: I1203 09:38:59.426143 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-6s2fr" Dec 03 09:38:59 crc kubenswrapper[4576]: I1203 09:38:59.529237 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6s2fr"] Dec 03 09:39:01 crc kubenswrapper[4576]: I1203 09:39:01.391915 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-6s2fr" podUID="700fb063-7b50-4aa8-8653-2f77be2b9f4e" containerName="registry-server" containerID="cri-o://fc7dea11b03580b538d101e13c4f8397f38036d2bdc5867e217da379ba7d98c4" gracePeriod=2 Dec 03 09:39:01 crc kubenswrapper[4576]: I1203 09:39:01.985113 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6s2fr" Dec 03 09:39:02 crc kubenswrapper[4576]: I1203 09:39:02.048574 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/700fb063-7b50-4aa8-8653-2f77be2b9f4e-catalog-content\") pod \"700fb063-7b50-4aa8-8653-2f77be2b9f4e\" (UID: \"700fb063-7b50-4aa8-8653-2f77be2b9f4e\") " Dec 03 09:39:02 crc kubenswrapper[4576]: I1203 09:39:02.048648 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/700fb063-7b50-4aa8-8653-2f77be2b9f4e-utilities\") pod \"700fb063-7b50-4aa8-8653-2f77be2b9f4e\" (UID: \"700fb063-7b50-4aa8-8653-2f77be2b9f4e\") " Dec 03 09:39:02 crc kubenswrapper[4576]: I1203 09:39:02.048715 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kxwdj\" (UniqueName: \"kubernetes.io/projected/700fb063-7b50-4aa8-8653-2f77be2b9f4e-kube-api-access-kxwdj\") pod \"700fb063-7b50-4aa8-8653-2f77be2b9f4e\" (UID: \"700fb063-7b50-4aa8-8653-2f77be2b9f4e\") " Dec 03 09:39:02 crc kubenswrapper[4576]: I1203 09:39:02.049970 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/700fb063-7b50-4aa8-8653-2f77be2b9f4e-utilities" (OuterVolumeSpecName: "utilities") pod "700fb063-7b50-4aa8-8653-2f77be2b9f4e" (UID: "700fb063-7b50-4aa8-8653-2f77be2b9f4e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:39:02 crc kubenswrapper[4576]: I1203 09:39:02.070338 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/700fb063-7b50-4aa8-8653-2f77be2b9f4e-kube-api-access-kxwdj" (OuterVolumeSpecName: "kube-api-access-kxwdj") pod "700fb063-7b50-4aa8-8653-2f77be2b9f4e" (UID: "700fb063-7b50-4aa8-8653-2f77be2b9f4e"). InnerVolumeSpecName "kube-api-access-kxwdj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:39:02 crc kubenswrapper[4576]: I1203 09:39:02.108614 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/700fb063-7b50-4aa8-8653-2f77be2b9f4e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "700fb063-7b50-4aa8-8653-2f77be2b9f4e" (UID: "700fb063-7b50-4aa8-8653-2f77be2b9f4e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:39:02 crc kubenswrapper[4576]: I1203 09:39:02.151170 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/700fb063-7b50-4aa8-8653-2f77be2b9f4e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 09:39:02 crc kubenswrapper[4576]: I1203 09:39:02.151201 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/700fb063-7b50-4aa8-8653-2f77be2b9f4e-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 09:39:02 crc kubenswrapper[4576]: I1203 09:39:02.151211 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kxwdj\" (UniqueName: \"kubernetes.io/projected/700fb063-7b50-4aa8-8653-2f77be2b9f4e-kube-api-access-kxwdj\") on node \"crc\" DevicePath \"\"" Dec 03 09:39:02 crc kubenswrapper[4576]: I1203 09:39:02.404606 4576 generic.go:334] "Generic (PLEG): container finished" podID="700fb063-7b50-4aa8-8653-2f77be2b9f4e" containerID="fc7dea11b03580b538d101e13c4f8397f38036d2bdc5867e217da379ba7d98c4" exitCode=0 Dec 03 09:39:02 crc kubenswrapper[4576]: I1203 09:39:02.404656 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6s2fr" event={"ID":"700fb063-7b50-4aa8-8653-2f77be2b9f4e","Type":"ContainerDied","Data":"fc7dea11b03580b538d101e13c4f8397f38036d2bdc5867e217da379ba7d98c4"} Dec 03 09:39:02 crc kubenswrapper[4576]: I1203 09:39:02.404713 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6s2fr" event={"ID":"700fb063-7b50-4aa8-8653-2f77be2b9f4e","Type":"ContainerDied","Data":"1339b78f7df5ec0f17d633294334461ac5b77030253fc7b5f0abc278e474764a"} Dec 03 09:39:02 crc kubenswrapper[4576]: I1203 09:39:02.404720 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6s2fr" Dec 03 09:39:02 crc kubenswrapper[4576]: I1203 09:39:02.404742 4576 scope.go:117] "RemoveContainer" containerID="fc7dea11b03580b538d101e13c4f8397f38036d2bdc5867e217da379ba7d98c4" Dec 03 09:39:02 crc kubenswrapper[4576]: I1203 09:39:02.428095 4576 scope.go:117] "RemoveContainer" containerID="7621bb7f45f587ec38eaf3dd2fcb48cc5fbeefebf4c80d30007109d39c91c88d" Dec 03 09:39:02 crc kubenswrapper[4576]: I1203 09:39:02.454736 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6s2fr"] Dec 03 09:39:02 crc kubenswrapper[4576]: I1203 09:39:02.466874 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-6s2fr"] Dec 03 09:39:02 crc kubenswrapper[4576]: I1203 09:39:02.470711 4576 scope.go:117] "RemoveContainer" containerID="2d110674f3c4022e49d8ee5d37c4549b9b6d6360a6efa4c15183d15d45064de2" Dec 03 09:39:02 crc kubenswrapper[4576]: I1203 09:39:02.530009 4576 scope.go:117] "RemoveContainer" containerID="fc7dea11b03580b538d101e13c4f8397f38036d2bdc5867e217da379ba7d98c4" Dec 03 09:39:02 crc kubenswrapper[4576]: E1203 09:39:02.530605 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fc7dea11b03580b538d101e13c4f8397f38036d2bdc5867e217da379ba7d98c4\": container with ID starting with fc7dea11b03580b538d101e13c4f8397f38036d2bdc5867e217da379ba7d98c4 not found: ID does not exist" containerID="fc7dea11b03580b538d101e13c4f8397f38036d2bdc5867e217da379ba7d98c4" Dec 03 09:39:02 crc kubenswrapper[4576]: I1203 09:39:02.530672 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc7dea11b03580b538d101e13c4f8397f38036d2bdc5867e217da379ba7d98c4"} err="failed to get container status \"fc7dea11b03580b538d101e13c4f8397f38036d2bdc5867e217da379ba7d98c4\": rpc error: code = NotFound desc = could not find container \"fc7dea11b03580b538d101e13c4f8397f38036d2bdc5867e217da379ba7d98c4\": container with ID starting with fc7dea11b03580b538d101e13c4f8397f38036d2bdc5867e217da379ba7d98c4 not found: ID does not exist" Dec 03 09:39:02 crc kubenswrapper[4576]: I1203 09:39:02.530708 4576 scope.go:117] "RemoveContainer" containerID="7621bb7f45f587ec38eaf3dd2fcb48cc5fbeefebf4c80d30007109d39c91c88d" Dec 03 09:39:02 crc kubenswrapper[4576]: E1203 09:39:02.531135 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7621bb7f45f587ec38eaf3dd2fcb48cc5fbeefebf4c80d30007109d39c91c88d\": container with ID starting with 7621bb7f45f587ec38eaf3dd2fcb48cc5fbeefebf4c80d30007109d39c91c88d not found: ID does not exist" containerID="7621bb7f45f587ec38eaf3dd2fcb48cc5fbeefebf4c80d30007109d39c91c88d" Dec 03 09:39:02 crc kubenswrapper[4576]: I1203 09:39:02.531216 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7621bb7f45f587ec38eaf3dd2fcb48cc5fbeefebf4c80d30007109d39c91c88d"} err="failed to get container status \"7621bb7f45f587ec38eaf3dd2fcb48cc5fbeefebf4c80d30007109d39c91c88d\": rpc error: code = NotFound desc = could not find container \"7621bb7f45f587ec38eaf3dd2fcb48cc5fbeefebf4c80d30007109d39c91c88d\": container with ID starting with 7621bb7f45f587ec38eaf3dd2fcb48cc5fbeefebf4c80d30007109d39c91c88d not found: ID does not exist" Dec 03 09:39:02 crc kubenswrapper[4576]: I1203 09:39:02.531237 4576 scope.go:117] "RemoveContainer" 
containerID="2d110674f3c4022e49d8ee5d37c4549b9b6d6360a6efa4c15183d15d45064de2" Dec 03 09:39:02 crc kubenswrapper[4576]: E1203 09:39:02.531610 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d110674f3c4022e49d8ee5d37c4549b9b6d6360a6efa4c15183d15d45064de2\": container with ID starting with 2d110674f3c4022e49d8ee5d37c4549b9b6d6360a6efa4c15183d15d45064de2 not found: ID does not exist" containerID="2d110674f3c4022e49d8ee5d37c4549b9b6d6360a6efa4c15183d15d45064de2" Dec 03 09:39:02 crc kubenswrapper[4576]: I1203 09:39:02.531647 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d110674f3c4022e49d8ee5d37c4549b9b6d6360a6efa4c15183d15d45064de2"} err="failed to get container status \"2d110674f3c4022e49d8ee5d37c4549b9b6d6360a6efa4c15183d15d45064de2\": rpc error: code = NotFound desc = could not find container \"2d110674f3c4022e49d8ee5d37c4549b9b6d6360a6efa4c15183d15d45064de2\": container with ID starting with 2d110674f3c4022e49d8ee5d37c4549b9b6d6360a6efa4c15183d15d45064de2 not found: ID does not exist" Dec 03 09:39:03 crc kubenswrapper[4576]: I1203 09:39:03.688195 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="700fb063-7b50-4aa8-8653-2f77be2b9f4e" path="/var/lib/kubelet/pods/700fb063-7b50-4aa8-8653-2f77be2b9f4e/volumes" Dec 03 09:41:09 crc kubenswrapper[4576]: I1203 09:41:09.680555 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 09:41:09 crc kubenswrapper[4576]: I1203 09:41:09.681318 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 09:41:39 crc kubenswrapper[4576]: I1203 09:41:39.681495 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 09:41:39 crc kubenswrapper[4576]: I1203 09:41:39.682160 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 09:42:09 crc kubenswrapper[4576]: I1203 09:42:09.681182 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 09:42:09 crc kubenswrapper[4576]: I1203 09:42:09.681738 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 09:42:09 crc kubenswrapper[4576]: I1203 09:42:09.687147 4576 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" Dec 03 09:42:09 crc kubenswrapper[4576]: I1203 09:42:09.687870 4576 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"203ce6e7360eeb498dd9ac293ee7cc7c5ecc0d5e931b63f02d758470199459d2"} pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 09:42:09 crc kubenswrapper[4576]: I1203 09:42:09.687929 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" containerID="cri-o://203ce6e7360eeb498dd9ac293ee7cc7c5ecc0d5e931b63f02d758470199459d2" gracePeriod=600 Dec 03 09:42:10 crc kubenswrapper[4576]: I1203 09:42:10.473906 4576 generic.go:334] "Generic (PLEG): container finished" podID="60b1bede-26e9-4b5d-b450-9866da685693" containerID="203ce6e7360eeb498dd9ac293ee7cc7c5ecc0d5e931b63f02d758470199459d2" exitCode=0 Dec 03 09:42:10 crc kubenswrapper[4576]: I1203 09:42:10.473957 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" event={"ID":"60b1bede-26e9-4b5d-b450-9866da685693","Type":"ContainerDied","Data":"203ce6e7360eeb498dd9ac293ee7cc7c5ecc0d5e931b63f02d758470199459d2"} Dec 03 09:42:10 crc kubenswrapper[4576]: I1203 09:42:10.473994 4576 scope.go:117] "RemoveContainer" containerID="707ccaf0ef5f353850073889c61e083c71eff648ae35958f1c9dff2475427c85" Dec 03 09:42:10 crc kubenswrapper[4576]: E1203 09:42:10.530470 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:42:11 crc kubenswrapper[4576]: I1203 09:42:11.485379 4576 scope.go:117] "RemoveContainer" containerID="203ce6e7360eeb498dd9ac293ee7cc7c5ecc0d5e931b63f02d758470199459d2" Dec 03 09:42:11 crc kubenswrapper[4576]: E1203 09:42:11.486055 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:42:12 crc kubenswrapper[4576]: I1203 09:42:12.495931 4576 generic.go:334] "Generic (PLEG): container finished" podID="f432497e-88f4-424f-beb0-856c58fb586d" containerID="48b1b84d581eb71d942839d95e8fbcb981b4353783b1014608e5972aa1ad2a31" exitCode=0 Dec 03 09:42:12 crc kubenswrapper[4576]: I1203 09:42:12.495993 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm" 
event={"ID":"f432497e-88f4-424f-beb0-856c58fb586d","Type":"ContainerDied","Data":"48b1b84d581eb71d942839d95e8fbcb981b4353783b1014608e5972aa1ad2a31"} Dec 03 09:42:13 crc kubenswrapper[4576]: I1203 09:42:13.957132 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm" Dec 03 09:42:14 crc kubenswrapper[4576]: I1203 09:42:14.108485 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/f432497e-88f4-424f-beb0-856c58fb586d-ceilometer-compute-config-data-1\") pod \"f432497e-88f4-424f-beb0-856c58fb586d\" (UID: \"f432497e-88f4-424f-beb0-856c58fb586d\") " Dec 03 09:42:14 crc kubenswrapper[4576]: I1203 09:42:14.108625 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f432497e-88f4-424f-beb0-856c58fb586d-telemetry-combined-ca-bundle\") pod \"f432497e-88f4-424f-beb0-856c58fb586d\" (UID: \"f432497e-88f4-424f-beb0-856c58fb586d\") " Dec 03 09:42:14 crc kubenswrapper[4576]: I1203 09:42:14.108705 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/f432497e-88f4-424f-beb0-856c58fb586d-ceilometer-compute-config-data-2\") pod \"f432497e-88f4-424f-beb0-856c58fb586d\" (UID: \"f432497e-88f4-424f-beb0-856c58fb586d\") " Dec 03 09:42:14 crc kubenswrapper[4576]: I1203 09:42:14.108832 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f432497e-88f4-424f-beb0-856c58fb586d-inventory\") pod \"f432497e-88f4-424f-beb0-856c58fb586d\" (UID: \"f432497e-88f4-424f-beb0-856c58fb586d\") " Dec 03 09:42:14 crc kubenswrapper[4576]: I1203 09:42:14.108881 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/f432497e-88f4-424f-beb0-856c58fb586d-ceilometer-compute-config-data-0\") pod \"f432497e-88f4-424f-beb0-856c58fb586d\" (UID: \"f432497e-88f4-424f-beb0-856c58fb586d\") " Dec 03 09:42:14 crc kubenswrapper[4576]: I1203 09:42:14.108978 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f432497e-88f4-424f-beb0-856c58fb586d-ssh-key\") pod \"f432497e-88f4-424f-beb0-856c58fb586d\" (UID: \"f432497e-88f4-424f-beb0-856c58fb586d\") " Dec 03 09:42:14 crc kubenswrapper[4576]: I1203 09:42:14.109067 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bxdld\" (UniqueName: \"kubernetes.io/projected/f432497e-88f4-424f-beb0-856c58fb586d-kube-api-access-bxdld\") pod \"f432497e-88f4-424f-beb0-856c58fb586d\" (UID: \"f432497e-88f4-424f-beb0-856c58fb586d\") " Dec 03 09:42:14 crc kubenswrapper[4576]: I1203 09:42:14.123750 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f432497e-88f4-424f-beb0-856c58fb586d-kube-api-access-bxdld" (OuterVolumeSpecName: "kube-api-access-bxdld") pod "f432497e-88f4-424f-beb0-856c58fb586d" (UID: "f432497e-88f4-424f-beb0-856c58fb586d"). InnerVolumeSpecName "kube-api-access-bxdld". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:42:14 crc kubenswrapper[4576]: I1203 09:42:14.123903 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f432497e-88f4-424f-beb0-856c58fb586d-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "f432497e-88f4-424f-beb0-856c58fb586d" (UID: "f432497e-88f4-424f-beb0-856c58fb586d"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:42:14 crc kubenswrapper[4576]: I1203 09:42:14.139638 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f432497e-88f4-424f-beb0-856c58fb586d-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "f432497e-88f4-424f-beb0-856c58fb586d" (UID: "f432497e-88f4-424f-beb0-856c58fb586d"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:42:14 crc kubenswrapper[4576]: I1203 09:42:14.141127 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f432497e-88f4-424f-beb0-856c58fb586d-inventory" (OuterVolumeSpecName: "inventory") pod "f432497e-88f4-424f-beb0-856c58fb586d" (UID: "f432497e-88f4-424f-beb0-856c58fb586d"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:42:14 crc kubenswrapper[4576]: I1203 09:42:14.141945 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f432497e-88f4-424f-beb0-856c58fb586d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f432497e-88f4-424f-beb0-856c58fb586d" (UID: "f432497e-88f4-424f-beb0-856c58fb586d"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:42:14 crc kubenswrapper[4576]: I1203 09:42:14.143106 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f432497e-88f4-424f-beb0-856c58fb586d-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "f432497e-88f4-424f-beb0-856c58fb586d" (UID: "f432497e-88f4-424f-beb0-856c58fb586d"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:42:14 crc kubenswrapper[4576]: I1203 09:42:14.157658 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f432497e-88f4-424f-beb0-856c58fb586d-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "f432497e-88f4-424f-beb0-856c58fb586d" (UID: "f432497e-88f4-424f-beb0-856c58fb586d"). InnerVolumeSpecName "ceilometer-compute-config-data-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:42:14 crc kubenswrapper[4576]: I1203 09:42:14.212682 4576 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f432497e-88f4-424f-beb0-856c58fb586d-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 09:42:14 crc kubenswrapper[4576]: I1203 09:42:14.212714 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bxdld\" (UniqueName: \"kubernetes.io/projected/f432497e-88f4-424f-beb0-856c58fb586d-kube-api-access-bxdld\") on node \"crc\" DevicePath \"\"" Dec 03 09:42:14 crc kubenswrapper[4576]: I1203 09:42:14.212724 4576 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/f432497e-88f4-424f-beb0-856c58fb586d-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Dec 03 09:42:14 crc kubenswrapper[4576]: I1203 09:42:14.212732 4576 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f432497e-88f4-424f-beb0-856c58fb586d-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 09:42:14 crc kubenswrapper[4576]: I1203 09:42:14.212741 4576 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/f432497e-88f4-424f-beb0-856c58fb586d-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Dec 03 09:42:14 crc kubenswrapper[4576]: I1203 09:42:14.212750 4576 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f432497e-88f4-424f-beb0-856c58fb586d-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 09:42:14 crc kubenswrapper[4576]: I1203 09:42:14.212759 4576 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/f432497e-88f4-424f-beb0-856c58fb586d-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Dec 03 09:42:14 crc kubenswrapper[4576]: I1203 09:42:14.516101 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm" event={"ID":"f432497e-88f4-424f-beb0-856c58fb586d","Type":"ContainerDied","Data":"d99627ccce41f36b56728f4c0b307cae252d6d7e94448deead8abd6c0d6266c2"} Dec 03 09:42:14 crc kubenswrapper[4576]: I1203 09:42:14.516144 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d99627ccce41f36b56728f4c0b307cae252d6d7e94448deead8abd6c0d6266c2" Dec 03 09:42:14 crc kubenswrapper[4576]: I1203 09:42:14.516170 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm" Dec 03 09:42:24 crc kubenswrapper[4576]: I1203 09:42:24.678769 4576 scope.go:117] "RemoveContainer" containerID="203ce6e7360eeb498dd9ac293ee7cc7c5ecc0d5e931b63f02d758470199459d2" Dec 03 09:42:24 crc kubenswrapper[4576]: E1203 09:42:24.679809 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:42:28 crc kubenswrapper[4576]: I1203 09:42:28.426264 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-7zbzk"] Dec 03 09:42:28 crc kubenswrapper[4576]: E1203 09:42:28.427121 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f432497e-88f4-424f-beb0-856c58fb586d" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 03 09:42:28 crc kubenswrapper[4576]: I1203 09:42:28.427145 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="f432497e-88f4-424f-beb0-856c58fb586d" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 03 09:42:28 crc kubenswrapper[4576]: E1203 09:42:28.427167 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="700fb063-7b50-4aa8-8653-2f77be2b9f4e" containerName="extract-content" Dec 03 09:42:28 crc kubenswrapper[4576]: I1203 09:42:28.427174 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="700fb063-7b50-4aa8-8653-2f77be2b9f4e" containerName="extract-content" Dec 03 09:42:28 crc kubenswrapper[4576]: E1203 09:42:28.427186 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="700fb063-7b50-4aa8-8653-2f77be2b9f4e" containerName="registry-server" Dec 03 09:42:28 crc kubenswrapper[4576]: I1203 09:42:28.427193 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="700fb063-7b50-4aa8-8653-2f77be2b9f4e" containerName="registry-server" Dec 03 09:42:28 crc kubenswrapper[4576]: E1203 09:42:28.427213 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="700fb063-7b50-4aa8-8653-2f77be2b9f4e" containerName="extract-utilities" Dec 03 09:42:28 crc kubenswrapper[4576]: I1203 09:42:28.427220 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="700fb063-7b50-4aa8-8653-2f77be2b9f4e" containerName="extract-utilities" Dec 03 09:42:28 crc kubenswrapper[4576]: I1203 09:42:28.427495 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="f432497e-88f4-424f-beb0-856c58fb586d" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 03 09:42:28 crc kubenswrapper[4576]: I1203 09:42:28.427556 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="700fb063-7b50-4aa8-8653-2f77be2b9f4e" containerName="registry-server" Dec 03 09:42:28 crc kubenswrapper[4576]: I1203 09:42:28.429189 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-7zbzk" Dec 03 09:42:28 crc kubenswrapper[4576]: I1203 09:42:28.446479 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7zbzk"] Dec 03 09:42:28 crc kubenswrapper[4576]: I1203 09:42:28.532628 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6n44l\" (UniqueName: \"kubernetes.io/projected/2754eb53-1298-494a-a1af-6a93c85408c9-kube-api-access-6n44l\") pod \"redhat-operators-7zbzk\" (UID: \"2754eb53-1298-494a-a1af-6a93c85408c9\") " pod="openshift-marketplace/redhat-operators-7zbzk" Dec 03 09:42:28 crc kubenswrapper[4576]: I1203 09:42:28.532810 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2754eb53-1298-494a-a1af-6a93c85408c9-catalog-content\") pod \"redhat-operators-7zbzk\" (UID: \"2754eb53-1298-494a-a1af-6a93c85408c9\") " pod="openshift-marketplace/redhat-operators-7zbzk" Dec 03 09:42:28 crc kubenswrapper[4576]: I1203 09:42:28.533023 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2754eb53-1298-494a-a1af-6a93c85408c9-utilities\") pod \"redhat-operators-7zbzk\" (UID: \"2754eb53-1298-494a-a1af-6a93c85408c9\") " pod="openshift-marketplace/redhat-operators-7zbzk" Dec 03 09:42:28 crc kubenswrapper[4576]: I1203 09:42:28.635316 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2754eb53-1298-494a-a1af-6a93c85408c9-utilities\") pod \"redhat-operators-7zbzk\" (UID: \"2754eb53-1298-494a-a1af-6a93c85408c9\") " pod="openshift-marketplace/redhat-operators-7zbzk" Dec 03 09:42:28 crc kubenswrapper[4576]: I1203 09:42:28.635461 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6n44l\" (UniqueName: \"kubernetes.io/projected/2754eb53-1298-494a-a1af-6a93c85408c9-kube-api-access-6n44l\") pod \"redhat-operators-7zbzk\" (UID: \"2754eb53-1298-494a-a1af-6a93c85408c9\") " pod="openshift-marketplace/redhat-operators-7zbzk" Dec 03 09:42:28 crc kubenswrapper[4576]: I1203 09:42:28.635503 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2754eb53-1298-494a-a1af-6a93c85408c9-catalog-content\") pod \"redhat-operators-7zbzk\" (UID: \"2754eb53-1298-494a-a1af-6a93c85408c9\") " pod="openshift-marketplace/redhat-operators-7zbzk" Dec 03 09:42:28 crc kubenswrapper[4576]: I1203 09:42:28.635809 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2754eb53-1298-494a-a1af-6a93c85408c9-utilities\") pod \"redhat-operators-7zbzk\" (UID: \"2754eb53-1298-494a-a1af-6a93c85408c9\") " pod="openshift-marketplace/redhat-operators-7zbzk" Dec 03 09:42:28 crc kubenswrapper[4576]: I1203 09:42:28.635895 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2754eb53-1298-494a-a1af-6a93c85408c9-catalog-content\") pod \"redhat-operators-7zbzk\" (UID: \"2754eb53-1298-494a-a1af-6a93c85408c9\") " pod="openshift-marketplace/redhat-operators-7zbzk" Dec 03 09:42:28 crc kubenswrapper[4576]: I1203 09:42:28.670987 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-6n44l\" (UniqueName: \"kubernetes.io/projected/2754eb53-1298-494a-a1af-6a93c85408c9-kube-api-access-6n44l\") pod \"redhat-operators-7zbzk\" (UID: \"2754eb53-1298-494a-a1af-6a93c85408c9\") " pod="openshift-marketplace/redhat-operators-7zbzk" Dec 03 09:42:28 crc kubenswrapper[4576]: I1203 09:42:28.802983 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7zbzk" Dec 03 09:42:29 crc kubenswrapper[4576]: I1203 09:42:29.335313 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7zbzk"] Dec 03 09:42:29 crc kubenswrapper[4576]: W1203 09:42:29.336984 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2754eb53_1298_494a_a1af_6a93c85408c9.slice/crio-7109f3eccb82e0e6cf64893a6f53349fff16d563f7209f08eb5aa76e553a3afe WatchSource:0}: Error finding container 7109f3eccb82e0e6cf64893a6f53349fff16d563f7209f08eb5aa76e553a3afe: Status 404 returned error can't find the container with id 7109f3eccb82e0e6cf64893a6f53349fff16d563f7209f08eb5aa76e553a3afe Dec 03 09:42:29 crc kubenswrapper[4576]: I1203 09:42:29.662024 4576 generic.go:334] "Generic (PLEG): container finished" podID="2754eb53-1298-494a-a1af-6a93c85408c9" containerID="f4738b221dba56b1cd781db4fcf928d1b506cbdb95b26d5d731040557dce107f" exitCode=0 Dec 03 09:42:29 crc kubenswrapper[4576]: I1203 09:42:29.662082 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7zbzk" event={"ID":"2754eb53-1298-494a-a1af-6a93c85408c9","Type":"ContainerDied","Data":"f4738b221dba56b1cd781db4fcf928d1b506cbdb95b26d5d731040557dce107f"} Dec 03 09:42:29 crc kubenswrapper[4576]: I1203 09:42:29.662344 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7zbzk" event={"ID":"2754eb53-1298-494a-a1af-6a93c85408c9","Type":"ContainerStarted","Data":"7109f3eccb82e0e6cf64893a6f53349fff16d563f7209f08eb5aa76e553a3afe"} Dec 03 09:42:29 crc kubenswrapper[4576]: I1203 09:42:29.663980 4576 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 09:42:31 crc kubenswrapper[4576]: I1203 09:42:31.690364 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7zbzk" event={"ID":"2754eb53-1298-494a-a1af-6a93c85408c9","Type":"ContainerStarted","Data":"fa7e299caed7e2c6ac17f51334509b25d17ae63bb7870eb19028833fe8483293"} Dec 03 09:42:35 crc kubenswrapper[4576]: I1203 09:42:35.728650 4576 generic.go:334] "Generic (PLEG): container finished" podID="2754eb53-1298-494a-a1af-6a93c85408c9" containerID="fa7e299caed7e2c6ac17f51334509b25d17ae63bb7870eb19028833fe8483293" exitCode=0 Dec 03 09:42:35 crc kubenswrapper[4576]: I1203 09:42:35.728733 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7zbzk" event={"ID":"2754eb53-1298-494a-a1af-6a93c85408c9","Type":"ContainerDied","Data":"fa7e299caed7e2c6ac17f51334509b25d17ae63bb7870eb19028833fe8483293"} Dec 03 09:42:37 crc kubenswrapper[4576]: I1203 09:42:37.751310 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7zbzk" event={"ID":"2754eb53-1298-494a-a1af-6a93c85408c9","Type":"ContainerStarted","Data":"1245d22e39bbd9379e7f7c04e4d0ce654b7d003e0f38db5d45c99cebe560ab37"} Dec 03 09:42:37 crc kubenswrapper[4576]: I1203 09:42:37.787877 4576 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openshift-marketplace/redhat-operators-7zbzk" podStartSLOduration=2.29759526 podStartE2EDuration="9.787854106s" podCreationTimestamp="2025-12-03 09:42:28 +0000 UTC" firstStartedPulling="2025-12-03 09:42:29.663696271 +0000 UTC m=+3757.049673255" lastFinishedPulling="2025-12-03 09:42:37.153955117 +0000 UTC m=+3764.539932101" observedRunningTime="2025-12-03 09:42:37.777821865 +0000 UTC m=+3765.163798859" watchObservedRunningTime="2025-12-03 09:42:37.787854106 +0000 UTC m=+3765.173831090" Dec 03 09:42:38 crc kubenswrapper[4576]: I1203 09:42:38.803432 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-7zbzk" Dec 03 09:42:38 crc kubenswrapper[4576]: I1203 09:42:38.803824 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-7zbzk" Dec 03 09:42:39 crc kubenswrapper[4576]: I1203 09:42:39.679144 4576 scope.go:117] "RemoveContainer" containerID="203ce6e7360eeb498dd9ac293ee7cc7c5ecc0d5e931b63f02d758470199459d2" Dec 03 09:42:39 crc kubenswrapper[4576]: E1203 09:42:39.679440 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:42:39 crc kubenswrapper[4576]: I1203 09:42:39.856059 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-7zbzk" podUID="2754eb53-1298-494a-a1af-6a93c85408c9" containerName="registry-server" probeResult="failure" output=< Dec 03 09:42:39 crc kubenswrapper[4576]: timeout: failed to connect service ":50051" within 1s Dec 03 09:42:39 crc kubenswrapper[4576]: > Dec 03 09:42:48 crc kubenswrapper[4576]: I1203 09:42:48.863000 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-7zbzk" Dec 03 09:42:48 crc kubenswrapper[4576]: I1203 09:42:48.917299 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-7zbzk" Dec 03 09:42:49 crc kubenswrapper[4576]: I1203 09:42:49.104412 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7zbzk"] Dec 03 09:42:50 crc kubenswrapper[4576]: I1203 09:42:50.677990 4576 scope.go:117] "RemoveContainer" containerID="203ce6e7360eeb498dd9ac293ee7cc7c5ecc0d5e931b63f02d758470199459d2" Dec 03 09:42:50 crc kubenswrapper[4576]: E1203 09:42:50.679076 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:42:50 crc kubenswrapper[4576]: I1203 09:42:50.866393 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-7zbzk" podUID="2754eb53-1298-494a-a1af-6a93c85408c9" containerName="registry-server" containerID="cri-o://1245d22e39bbd9379e7f7c04e4d0ce654b7d003e0f38db5d45c99cebe560ab37" 
gracePeriod=2 Dec 03 09:42:51 crc kubenswrapper[4576]: I1203 09:42:51.819292 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7zbzk" Dec 03 09:42:51 crc kubenswrapper[4576]: I1203 09:42:51.877403 4576 generic.go:334] "Generic (PLEG): container finished" podID="2754eb53-1298-494a-a1af-6a93c85408c9" containerID="1245d22e39bbd9379e7f7c04e4d0ce654b7d003e0f38db5d45c99cebe560ab37" exitCode=0 Dec 03 09:42:51 crc kubenswrapper[4576]: I1203 09:42:51.877444 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7zbzk" event={"ID":"2754eb53-1298-494a-a1af-6a93c85408c9","Type":"ContainerDied","Data":"1245d22e39bbd9379e7f7c04e4d0ce654b7d003e0f38db5d45c99cebe560ab37"} Dec 03 09:42:51 crc kubenswrapper[4576]: I1203 09:42:51.877473 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7zbzk" event={"ID":"2754eb53-1298-494a-a1af-6a93c85408c9","Type":"ContainerDied","Data":"7109f3eccb82e0e6cf64893a6f53349fff16d563f7209f08eb5aa76e553a3afe"} Dec 03 09:42:51 crc kubenswrapper[4576]: I1203 09:42:51.877492 4576 scope.go:117] "RemoveContainer" containerID="1245d22e39bbd9379e7f7c04e4d0ce654b7d003e0f38db5d45c99cebe560ab37" Dec 03 09:42:51 crc kubenswrapper[4576]: I1203 09:42:51.877542 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7zbzk" Dec 03 09:42:51 crc kubenswrapper[4576]: I1203 09:42:51.898057 4576 scope.go:117] "RemoveContainer" containerID="fa7e299caed7e2c6ac17f51334509b25d17ae63bb7870eb19028833fe8483293" Dec 03 09:42:51 crc kubenswrapper[4576]: I1203 09:42:51.937470 4576 scope.go:117] "RemoveContainer" containerID="f4738b221dba56b1cd781db4fcf928d1b506cbdb95b26d5d731040557dce107f" Dec 03 09:42:51 crc kubenswrapper[4576]: I1203 09:42:51.981744 4576 scope.go:117] "RemoveContainer" containerID="1245d22e39bbd9379e7f7c04e4d0ce654b7d003e0f38db5d45c99cebe560ab37" Dec 03 09:42:51 crc kubenswrapper[4576]: E1203 09:42:51.982242 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1245d22e39bbd9379e7f7c04e4d0ce654b7d003e0f38db5d45c99cebe560ab37\": container with ID starting with 1245d22e39bbd9379e7f7c04e4d0ce654b7d003e0f38db5d45c99cebe560ab37 not found: ID does not exist" containerID="1245d22e39bbd9379e7f7c04e4d0ce654b7d003e0f38db5d45c99cebe560ab37" Dec 03 09:42:51 crc kubenswrapper[4576]: I1203 09:42:51.982285 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1245d22e39bbd9379e7f7c04e4d0ce654b7d003e0f38db5d45c99cebe560ab37"} err="failed to get container status \"1245d22e39bbd9379e7f7c04e4d0ce654b7d003e0f38db5d45c99cebe560ab37\": rpc error: code = NotFound desc = could not find container \"1245d22e39bbd9379e7f7c04e4d0ce654b7d003e0f38db5d45c99cebe560ab37\": container with ID starting with 1245d22e39bbd9379e7f7c04e4d0ce654b7d003e0f38db5d45c99cebe560ab37 not found: ID does not exist" Dec 03 09:42:51 crc kubenswrapper[4576]: I1203 09:42:51.982305 4576 scope.go:117] "RemoveContainer" containerID="fa7e299caed7e2c6ac17f51334509b25d17ae63bb7870eb19028833fe8483293" Dec 03 09:42:51 crc kubenswrapper[4576]: E1203 09:42:51.982568 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa7e299caed7e2c6ac17f51334509b25d17ae63bb7870eb19028833fe8483293\": container with ID starting with 
fa7e299caed7e2c6ac17f51334509b25d17ae63bb7870eb19028833fe8483293 not found: ID does not exist" containerID="fa7e299caed7e2c6ac17f51334509b25d17ae63bb7870eb19028833fe8483293" Dec 03 09:42:51 crc kubenswrapper[4576]: I1203 09:42:51.982594 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa7e299caed7e2c6ac17f51334509b25d17ae63bb7870eb19028833fe8483293"} err="failed to get container status \"fa7e299caed7e2c6ac17f51334509b25d17ae63bb7870eb19028833fe8483293\": rpc error: code = NotFound desc = could not find container \"fa7e299caed7e2c6ac17f51334509b25d17ae63bb7870eb19028833fe8483293\": container with ID starting with fa7e299caed7e2c6ac17f51334509b25d17ae63bb7870eb19028833fe8483293 not found: ID does not exist" Dec 03 09:42:51 crc kubenswrapper[4576]: I1203 09:42:51.982615 4576 scope.go:117] "RemoveContainer" containerID="f4738b221dba56b1cd781db4fcf928d1b506cbdb95b26d5d731040557dce107f" Dec 03 09:42:51 crc kubenswrapper[4576]: E1203 09:42:51.982985 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4738b221dba56b1cd781db4fcf928d1b506cbdb95b26d5d731040557dce107f\": container with ID starting with f4738b221dba56b1cd781db4fcf928d1b506cbdb95b26d5d731040557dce107f not found: ID does not exist" containerID="f4738b221dba56b1cd781db4fcf928d1b506cbdb95b26d5d731040557dce107f" Dec 03 09:42:51 crc kubenswrapper[4576]: I1203 09:42:51.983002 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4738b221dba56b1cd781db4fcf928d1b506cbdb95b26d5d731040557dce107f"} err="failed to get container status \"f4738b221dba56b1cd781db4fcf928d1b506cbdb95b26d5d731040557dce107f\": rpc error: code = NotFound desc = could not find container \"f4738b221dba56b1cd781db4fcf928d1b506cbdb95b26d5d731040557dce107f\": container with ID starting with f4738b221dba56b1cd781db4fcf928d1b506cbdb95b26d5d731040557dce107f not found: ID does not exist" Dec 03 09:42:51 crc kubenswrapper[4576]: I1203 09:42:51.987175 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2754eb53-1298-494a-a1af-6a93c85408c9-catalog-content\") pod \"2754eb53-1298-494a-a1af-6a93c85408c9\" (UID: \"2754eb53-1298-494a-a1af-6a93c85408c9\") " Dec 03 09:42:51 crc kubenswrapper[4576]: I1203 09:42:51.987309 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6n44l\" (UniqueName: \"kubernetes.io/projected/2754eb53-1298-494a-a1af-6a93c85408c9-kube-api-access-6n44l\") pod \"2754eb53-1298-494a-a1af-6a93c85408c9\" (UID: \"2754eb53-1298-494a-a1af-6a93c85408c9\") " Dec 03 09:42:51 crc kubenswrapper[4576]: I1203 09:42:51.987566 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2754eb53-1298-494a-a1af-6a93c85408c9-utilities\") pod \"2754eb53-1298-494a-a1af-6a93c85408c9\" (UID: \"2754eb53-1298-494a-a1af-6a93c85408c9\") " Dec 03 09:42:51 crc kubenswrapper[4576]: I1203 09:42:51.988147 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2754eb53-1298-494a-a1af-6a93c85408c9-utilities" (OuterVolumeSpecName: "utilities") pod "2754eb53-1298-494a-a1af-6a93c85408c9" (UID: "2754eb53-1298-494a-a1af-6a93c85408c9"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:42:51 crc kubenswrapper[4576]: I1203 09:42:51.996771 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2754eb53-1298-494a-a1af-6a93c85408c9-kube-api-access-6n44l" (OuterVolumeSpecName: "kube-api-access-6n44l") pod "2754eb53-1298-494a-a1af-6a93c85408c9" (UID: "2754eb53-1298-494a-a1af-6a93c85408c9"). InnerVolumeSpecName "kube-api-access-6n44l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:42:52 crc kubenswrapper[4576]: I1203 09:42:52.091099 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2754eb53-1298-494a-a1af-6a93c85408c9-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 09:42:52 crc kubenswrapper[4576]: I1203 09:42:52.091131 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6n44l\" (UniqueName: \"kubernetes.io/projected/2754eb53-1298-494a-a1af-6a93c85408c9-kube-api-access-6n44l\") on node \"crc\" DevicePath \"\"" Dec 03 09:42:52 crc kubenswrapper[4576]: I1203 09:42:52.142054 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2754eb53-1298-494a-a1af-6a93c85408c9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2754eb53-1298-494a-a1af-6a93c85408c9" (UID: "2754eb53-1298-494a-a1af-6a93c85408c9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:42:52 crc kubenswrapper[4576]: I1203 09:42:52.191988 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2754eb53-1298-494a-a1af-6a93c85408c9-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 09:42:52 crc kubenswrapper[4576]: I1203 09:42:52.212376 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7zbzk"] Dec 03 09:42:52 crc kubenswrapper[4576]: I1203 09:42:52.220744 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-7zbzk"] Dec 03 09:42:53 crc kubenswrapper[4576]: I1203 09:42:53.690820 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2754eb53-1298-494a-a1af-6a93c85408c9" path="/var/lib/kubelet/pods/2754eb53-1298-494a-a1af-6a93c85408c9/volumes" Dec 03 09:43:01 crc kubenswrapper[4576]: I1203 09:43:01.677612 4576 scope.go:117] "RemoveContainer" containerID="203ce6e7360eeb498dd9ac293ee7cc7c5ecc0d5e931b63f02d758470199459d2" Dec 03 09:43:01 crc kubenswrapper[4576]: E1203 09:43:01.678360 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:43:16 crc kubenswrapper[4576]: I1203 09:43:16.677868 4576 scope.go:117] "RemoveContainer" containerID="203ce6e7360eeb498dd9ac293ee7cc7c5ecc0d5e931b63f02d758470199459d2" Dec 03 09:43:16 crc kubenswrapper[4576]: E1203 09:43:16.679129 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:43:18 crc kubenswrapper[4576]: I1203 09:43:18.164241 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Dec 03 09:43:18 crc kubenswrapper[4576]: E1203 09:43:18.165211 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2754eb53-1298-494a-a1af-6a93c85408c9" containerName="extract-utilities" Dec 03 09:43:18 crc kubenswrapper[4576]: I1203 09:43:18.165225 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="2754eb53-1298-494a-a1af-6a93c85408c9" containerName="extract-utilities" Dec 03 09:43:18 crc kubenswrapper[4576]: E1203 09:43:18.165240 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2754eb53-1298-494a-a1af-6a93c85408c9" containerName="registry-server" Dec 03 09:43:18 crc kubenswrapper[4576]: I1203 09:43:18.165246 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="2754eb53-1298-494a-a1af-6a93c85408c9" containerName="registry-server" Dec 03 09:43:18 crc kubenswrapper[4576]: E1203 09:43:18.165279 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2754eb53-1298-494a-a1af-6a93c85408c9" containerName="extract-content" Dec 03 09:43:18 crc kubenswrapper[4576]: I1203 09:43:18.165287 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="2754eb53-1298-494a-a1af-6a93c85408c9" containerName="extract-content" Dec 03 09:43:18 crc kubenswrapper[4576]: I1203 09:43:18.165464 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="2754eb53-1298-494a-a1af-6a93c85408c9" containerName="registry-server" Dec 03 09:43:18 crc kubenswrapper[4576]: I1203 09:43:18.166636 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 03 09:43:18 crc kubenswrapper[4576]: I1203 09:43:18.168889 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Dec 03 09:43:18 crc kubenswrapper[4576]: I1203 09:43:18.168993 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-2cmsr" Dec 03 09:43:18 crc kubenswrapper[4576]: I1203 09:43:18.169121 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 03 09:43:18 crc kubenswrapper[4576]: I1203 09:43:18.169583 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Dec 03 09:43:18 crc kubenswrapper[4576]: I1203 09:43:18.184444 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 03 09:43:18 crc kubenswrapper[4576]: I1203 09:43:18.358024 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/34096dc4-8175-4637-916a-9e52376b8c08-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"34096dc4-8175-4637-916a-9e52376b8c08\") " pod="openstack/tempest-tests-tempest" Dec 03 09:43:18 crc kubenswrapper[4576]: I1203 09:43:18.358469 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9p29\" (UniqueName: \"kubernetes.io/projected/34096dc4-8175-4637-916a-9e52376b8c08-kube-api-access-t9p29\") pod \"tempest-tests-tempest\" (UID: \"34096dc4-8175-4637-916a-9e52376b8c08\") " pod="openstack/tempest-tests-tempest" Dec 03 09:43:18 crc kubenswrapper[4576]: I1203 09:43:18.358628 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/34096dc4-8175-4637-916a-9e52376b8c08-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"34096dc4-8175-4637-916a-9e52376b8c08\") " pod="openstack/tempest-tests-tempest" Dec 03 09:43:18 crc kubenswrapper[4576]: I1203 09:43:18.358810 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/34096dc4-8175-4637-916a-9e52376b8c08-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"34096dc4-8175-4637-916a-9e52376b8c08\") " pod="openstack/tempest-tests-tempest" Dec 03 09:43:18 crc kubenswrapper[4576]: I1203 09:43:18.359046 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/34096dc4-8175-4637-916a-9e52376b8c08-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"34096dc4-8175-4637-916a-9e52376b8c08\") " pod="openstack/tempest-tests-tempest" Dec 03 09:43:18 crc kubenswrapper[4576]: I1203 09:43:18.359196 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/34096dc4-8175-4637-916a-9e52376b8c08-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"34096dc4-8175-4637-916a-9e52376b8c08\") " pod="openstack/tempest-tests-tempest" Dec 03 09:43:18 crc kubenswrapper[4576]: I1203 09:43:18.359500 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/configmap/34096dc4-8175-4637-916a-9e52376b8c08-config-data\") pod \"tempest-tests-tempest\" (UID: \"34096dc4-8175-4637-916a-9e52376b8c08\") " pod="openstack/tempest-tests-tempest" Dec 03 09:43:18 crc kubenswrapper[4576]: I1203 09:43:18.359776 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"tempest-tests-tempest\" (UID: \"34096dc4-8175-4637-916a-9e52376b8c08\") " pod="openstack/tempest-tests-tempest" Dec 03 09:43:18 crc kubenswrapper[4576]: I1203 09:43:18.359966 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/34096dc4-8175-4637-916a-9e52376b8c08-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"34096dc4-8175-4637-916a-9e52376b8c08\") " pod="openstack/tempest-tests-tempest" Dec 03 09:43:18 crc kubenswrapper[4576]: I1203 09:43:18.462043 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9p29\" (UniqueName: \"kubernetes.io/projected/34096dc4-8175-4637-916a-9e52376b8c08-kube-api-access-t9p29\") pod \"tempest-tests-tempest\" (UID: \"34096dc4-8175-4637-916a-9e52376b8c08\") " pod="openstack/tempest-tests-tempest" Dec 03 09:43:18 crc kubenswrapper[4576]: I1203 09:43:18.462131 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/34096dc4-8175-4637-916a-9e52376b8c08-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"34096dc4-8175-4637-916a-9e52376b8c08\") " pod="openstack/tempest-tests-tempest" Dec 03 09:43:18 crc kubenswrapper[4576]: I1203 09:43:18.462180 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/34096dc4-8175-4637-916a-9e52376b8c08-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"34096dc4-8175-4637-916a-9e52376b8c08\") " pod="openstack/tempest-tests-tempest" Dec 03 09:43:18 crc kubenswrapper[4576]: I1203 09:43:18.462270 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/34096dc4-8175-4637-916a-9e52376b8c08-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"34096dc4-8175-4637-916a-9e52376b8c08\") " pod="openstack/tempest-tests-tempest" Dec 03 09:43:18 crc kubenswrapper[4576]: I1203 09:43:18.462316 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/34096dc4-8175-4637-916a-9e52376b8c08-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"34096dc4-8175-4637-916a-9e52376b8c08\") " pod="openstack/tempest-tests-tempest" Dec 03 09:43:18 crc kubenswrapper[4576]: I1203 09:43:18.462428 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/34096dc4-8175-4637-916a-9e52376b8c08-config-data\") pod \"tempest-tests-tempest\" (UID: \"34096dc4-8175-4637-916a-9e52376b8c08\") " pod="openstack/tempest-tests-tempest" Dec 03 09:43:18 crc kubenswrapper[4576]: I1203 09:43:18.462655 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"tempest-tests-tempest\" 
(UID: \"34096dc4-8175-4637-916a-9e52376b8c08\") " pod="openstack/tempest-tests-tempest" Dec 03 09:43:18 crc kubenswrapper[4576]: I1203 09:43:18.462696 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/34096dc4-8175-4637-916a-9e52376b8c08-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"34096dc4-8175-4637-916a-9e52376b8c08\") " pod="openstack/tempest-tests-tempest" Dec 03 09:43:18 crc kubenswrapper[4576]: I1203 09:43:18.462747 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/34096dc4-8175-4637-916a-9e52376b8c08-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"34096dc4-8175-4637-916a-9e52376b8c08\") " pod="openstack/tempest-tests-tempest" Dec 03 09:43:18 crc kubenswrapper[4576]: I1203 09:43:18.463061 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/34096dc4-8175-4637-916a-9e52376b8c08-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"34096dc4-8175-4637-916a-9e52376b8c08\") " pod="openstack/tempest-tests-tempest" Dec 03 09:43:18 crc kubenswrapper[4576]: I1203 09:43:18.463943 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/34096dc4-8175-4637-916a-9e52376b8c08-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"34096dc4-8175-4637-916a-9e52376b8c08\") " pod="openstack/tempest-tests-tempest" Dec 03 09:43:18 crc kubenswrapper[4576]: I1203 09:43:18.464041 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/34096dc4-8175-4637-916a-9e52376b8c08-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"34096dc4-8175-4637-916a-9e52376b8c08\") " pod="openstack/tempest-tests-tempest" Dec 03 09:43:18 crc kubenswrapper[4576]: I1203 09:43:18.464147 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/34096dc4-8175-4637-916a-9e52376b8c08-config-data\") pod \"tempest-tests-tempest\" (UID: \"34096dc4-8175-4637-916a-9e52376b8c08\") " pod="openstack/tempest-tests-tempest" Dec 03 09:43:18 crc kubenswrapper[4576]: I1203 09:43:18.470382 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/34096dc4-8175-4637-916a-9e52376b8c08-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"34096dc4-8175-4637-916a-9e52376b8c08\") " pod="openstack/tempest-tests-tempest" Dec 03 09:43:18 crc kubenswrapper[4576]: I1203 09:43:18.472586 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/34096dc4-8175-4637-916a-9e52376b8c08-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"34096dc4-8175-4637-916a-9e52376b8c08\") " pod="openstack/tempest-tests-tempest" Dec 03 09:43:18 crc kubenswrapper[4576]: I1203 09:43:18.473138 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/34096dc4-8175-4637-916a-9e52376b8c08-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"34096dc4-8175-4637-916a-9e52376b8c08\") " pod="openstack/tempest-tests-tempest" Dec 03 09:43:18 crc kubenswrapper[4576]: I1203 09:43:18.490502 4576 
operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"tempest-tests-tempest\" (UID: \"34096dc4-8175-4637-916a-9e52376b8c08\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/tempest-tests-tempest" Dec 03 09:43:18 crc kubenswrapper[4576]: I1203 09:43:18.492251 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9p29\" (UniqueName: \"kubernetes.io/projected/34096dc4-8175-4637-916a-9e52376b8c08-kube-api-access-t9p29\") pod \"tempest-tests-tempest\" (UID: \"34096dc4-8175-4637-916a-9e52376b8c08\") " pod="openstack/tempest-tests-tempest" Dec 03 09:43:18 crc kubenswrapper[4576]: I1203 09:43:18.526188 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"tempest-tests-tempest\" (UID: \"34096dc4-8175-4637-916a-9e52376b8c08\") " pod="openstack/tempest-tests-tempest" Dec 03 09:43:18 crc kubenswrapper[4576]: I1203 09:43:18.792310 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 03 09:43:19 crc kubenswrapper[4576]: I1203 09:43:19.275017 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 03 09:43:19 crc kubenswrapper[4576]: W1203 09:43:19.280863 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod34096dc4_8175_4637_916a_9e52376b8c08.slice/crio-83666ab84866734d20baeeae375208e9f32d0649ec6a70a8d9d5112abb514c58 WatchSource:0}: Error finding container 83666ab84866734d20baeeae375208e9f32d0649ec6a70a8d9d5112abb514c58: Status 404 returned error can't find the container with id 83666ab84866734d20baeeae375208e9f32d0649ec6a70a8d9d5112abb514c58 Dec 03 09:43:20 crc kubenswrapper[4576]: I1203 09:43:20.203835 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"34096dc4-8175-4637-916a-9e52376b8c08","Type":"ContainerStarted","Data":"83666ab84866734d20baeeae375208e9f32d0649ec6a70a8d9d5112abb514c58"} Dec 03 09:43:28 crc kubenswrapper[4576]: I1203 09:43:28.677241 4576 scope.go:117] "RemoveContainer" containerID="203ce6e7360eeb498dd9ac293ee7cc7c5ecc0d5e931b63f02d758470199459d2" Dec 03 09:43:28 crc kubenswrapper[4576]: E1203 09:43:28.680640 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:43:41 crc kubenswrapper[4576]: I1203 09:43:41.676876 4576 scope.go:117] "RemoveContainer" containerID="203ce6e7360eeb498dd9ac293ee7cc7c5ecc0d5e931b63f02d758470199459d2" Dec 03 09:43:41 crc kubenswrapper[4576]: E1203 09:43:41.677691 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" 
podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:43:54 crc kubenswrapper[4576]: I1203 09:43:54.678673 4576 scope.go:117] "RemoveContainer" containerID="203ce6e7360eeb498dd9ac293ee7cc7c5ecc0d5e931b63f02d758470199459d2" Dec 03 09:43:54 crc kubenswrapper[4576]: E1203 09:43:54.679809 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:43:57 crc kubenswrapper[4576]: E1203 09:43:57.357201 4576 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified" Dec 03 09:43:57 crc kubenswrapper[4576]: E1203 09:43:57.359000 4576 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-t9p29,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProf
ile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(34096dc4-8175-4637-916a-9e52376b8c08): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 09:43:57 crc kubenswrapper[4576]: E1203 09:43:57.360299 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" podUID="34096dc4-8175-4637-916a-9e52376b8c08" Dec 03 09:43:57 crc kubenswrapper[4576]: E1203 09:43:57.600026 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="34096dc4-8175-4637-916a-9e52376b8c08" Dec 03 09:44:07 crc kubenswrapper[4576]: I1203 09:44:07.677298 4576 scope.go:117] "RemoveContainer" containerID="203ce6e7360eeb498dd9ac293ee7cc7c5ecc0d5e931b63f02d758470199459d2" Dec 03 09:44:07 crc kubenswrapper[4576]: E1203 09:44:07.677992 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:44:09 crc kubenswrapper[4576]: I1203 09:44:09.406240 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 03 09:44:11 crc kubenswrapper[4576]: I1203 09:44:11.745948 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"34096dc4-8175-4637-916a-9e52376b8c08","Type":"ContainerStarted","Data":"95fba86fc10497a6aceba808d3334f2aa2ce11feb0407bd279ef2e7374fc3c49"} Dec 03 09:44:11 crc kubenswrapper[4576]: I1203 09:44:11.773324 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=4.652932811 podStartE2EDuration="54.773306465s" podCreationTimestamp="2025-12-03 09:43:17 +0000 UTC" firstStartedPulling="2025-12-03 09:43:19.283574693 +0000 UTC m=+3806.669551667" lastFinishedPulling="2025-12-03 09:44:09.403948337 +0000 UTC m=+3856.789925321" observedRunningTime="2025-12-03 09:44:11.77087109 +0000 UTC m=+3859.156848074" watchObservedRunningTime="2025-12-03 09:44:11.773306465 +0000 UTC m=+3859.159283449" Dec 03 09:44:19 crc kubenswrapper[4576]: I1203 09:44:19.678184 4576 scope.go:117] "RemoveContainer" containerID="203ce6e7360eeb498dd9ac293ee7cc7c5ecc0d5e931b63f02d758470199459d2" Dec 03 09:44:19 crc kubenswrapper[4576]: E1203 09:44:19.680081 
4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:44:31 crc kubenswrapper[4576]: I1203 09:44:31.677486 4576 scope.go:117] "RemoveContainer" containerID="203ce6e7360eeb498dd9ac293ee7cc7c5ecc0d5e931b63f02d758470199459d2" Dec 03 09:44:31 crc kubenswrapper[4576]: E1203 09:44:31.678555 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:44:43 crc kubenswrapper[4576]: I1203 09:44:43.684748 4576 scope.go:117] "RemoveContainer" containerID="203ce6e7360eeb498dd9ac293ee7cc7c5ecc0d5e931b63f02d758470199459d2" Dec 03 09:44:43 crc kubenswrapper[4576]: E1203 09:44:43.685720 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:44:55 crc kubenswrapper[4576]: I1203 09:44:55.677735 4576 scope.go:117] "RemoveContainer" containerID="203ce6e7360eeb498dd9ac293ee7cc7c5ecc0d5e931b63f02d758470199459d2" Dec 03 09:44:55 crc kubenswrapper[4576]: E1203 09:44:55.678512 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:45:00 crc kubenswrapper[4576]: I1203 09:45:00.233960 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412585-mfmzh"] Dec 03 09:45:00 crc kubenswrapper[4576]: I1203 09:45:00.235881 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412585-mfmzh" Dec 03 09:45:00 crc kubenswrapper[4576]: I1203 09:45:00.238150 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 03 09:45:00 crc kubenswrapper[4576]: I1203 09:45:00.247541 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 03 09:45:00 crc kubenswrapper[4576]: I1203 09:45:00.266428 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412585-mfmzh"] Dec 03 09:45:00 crc kubenswrapper[4576]: I1203 09:45:00.360318 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/70a286c7-98b0-42c3-badb-d0f9ee2e0b8a-secret-volume\") pod \"collect-profiles-29412585-mfmzh\" (UID: \"70a286c7-98b0-42c3-badb-d0f9ee2e0b8a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412585-mfmzh" Dec 03 09:45:00 crc kubenswrapper[4576]: I1203 09:45:00.360754 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/70a286c7-98b0-42c3-badb-d0f9ee2e0b8a-config-volume\") pod \"collect-profiles-29412585-mfmzh\" (UID: \"70a286c7-98b0-42c3-badb-d0f9ee2e0b8a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412585-mfmzh" Dec 03 09:45:00 crc kubenswrapper[4576]: I1203 09:45:00.360795 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vk7m5\" (UniqueName: \"kubernetes.io/projected/70a286c7-98b0-42c3-badb-d0f9ee2e0b8a-kube-api-access-vk7m5\") pod \"collect-profiles-29412585-mfmzh\" (UID: \"70a286c7-98b0-42c3-badb-d0f9ee2e0b8a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412585-mfmzh" Dec 03 09:45:00 crc kubenswrapper[4576]: I1203 09:45:00.462465 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/70a286c7-98b0-42c3-badb-d0f9ee2e0b8a-config-volume\") pod \"collect-profiles-29412585-mfmzh\" (UID: \"70a286c7-98b0-42c3-badb-d0f9ee2e0b8a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412585-mfmzh" Dec 03 09:45:00 crc kubenswrapper[4576]: I1203 09:45:00.462518 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vk7m5\" (UniqueName: \"kubernetes.io/projected/70a286c7-98b0-42c3-badb-d0f9ee2e0b8a-kube-api-access-vk7m5\") pod \"collect-profiles-29412585-mfmzh\" (UID: \"70a286c7-98b0-42c3-badb-d0f9ee2e0b8a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412585-mfmzh" Dec 03 09:45:00 crc kubenswrapper[4576]: I1203 09:45:00.462672 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/70a286c7-98b0-42c3-badb-d0f9ee2e0b8a-secret-volume\") pod \"collect-profiles-29412585-mfmzh\" (UID: \"70a286c7-98b0-42c3-badb-d0f9ee2e0b8a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412585-mfmzh" Dec 03 09:45:00 crc kubenswrapper[4576]: I1203 09:45:00.464035 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/70a286c7-98b0-42c3-badb-d0f9ee2e0b8a-config-volume\") pod 
\"collect-profiles-29412585-mfmzh\" (UID: \"70a286c7-98b0-42c3-badb-d0f9ee2e0b8a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412585-mfmzh" Dec 03 09:45:00 crc kubenswrapper[4576]: I1203 09:45:00.476486 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/70a286c7-98b0-42c3-badb-d0f9ee2e0b8a-secret-volume\") pod \"collect-profiles-29412585-mfmzh\" (UID: \"70a286c7-98b0-42c3-badb-d0f9ee2e0b8a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412585-mfmzh" Dec 03 09:45:00 crc kubenswrapper[4576]: I1203 09:45:00.481164 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vk7m5\" (UniqueName: \"kubernetes.io/projected/70a286c7-98b0-42c3-badb-d0f9ee2e0b8a-kube-api-access-vk7m5\") pod \"collect-profiles-29412585-mfmzh\" (UID: \"70a286c7-98b0-42c3-badb-d0f9ee2e0b8a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412585-mfmzh" Dec 03 09:45:00 crc kubenswrapper[4576]: I1203 09:45:00.559587 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412585-mfmzh" Dec 03 09:45:01 crc kubenswrapper[4576]: I1203 09:45:01.944484 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412585-mfmzh"] Dec 03 09:45:02 crc kubenswrapper[4576]: I1203 09:45:02.330805 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412585-mfmzh" event={"ID":"70a286c7-98b0-42c3-badb-d0f9ee2e0b8a","Type":"ContainerStarted","Data":"8aa4a6b321c909ed07bb5e82c8d95e42406555e9e769acd0cd146818a6990133"} Dec 03 09:45:02 crc kubenswrapper[4576]: I1203 09:45:02.331073 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412585-mfmzh" event={"ID":"70a286c7-98b0-42c3-badb-d0f9ee2e0b8a","Type":"ContainerStarted","Data":"deb7447c1e7cb83229662019aad79a06a2bfaf64f8ee56a439be315e733d01ee"} Dec 03 09:45:02 crc kubenswrapper[4576]: I1203 09:45:02.380762 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29412585-mfmzh" podStartSLOduration=2.380744406 podStartE2EDuration="2.380744406s" podCreationTimestamp="2025-12-03 09:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 09:45:02.377767065 +0000 UTC m=+3909.763744049" watchObservedRunningTime="2025-12-03 09:45:02.380744406 +0000 UTC m=+3909.766721390" Dec 03 09:45:03 crc kubenswrapper[4576]: I1203 09:45:03.344055 4576 generic.go:334] "Generic (PLEG): container finished" podID="70a286c7-98b0-42c3-badb-d0f9ee2e0b8a" containerID="8aa4a6b321c909ed07bb5e82c8d95e42406555e9e769acd0cd146818a6990133" exitCode=0 Dec 03 09:45:03 crc kubenswrapper[4576]: I1203 09:45:03.344123 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412585-mfmzh" event={"ID":"70a286c7-98b0-42c3-badb-d0f9ee2e0b8a","Type":"ContainerDied","Data":"8aa4a6b321c909ed07bb5e82c8d95e42406555e9e769acd0cd146818a6990133"} Dec 03 09:45:04 crc kubenswrapper[4576]: I1203 09:45:04.858082 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412585-mfmzh" Dec 03 09:45:04 crc kubenswrapper[4576]: I1203 09:45:04.956484 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/70a286c7-98b0-42c3-badb-d0f9ee2e0b8a-config-volume\") pod \"70a286c7-98b0-42c3-badb-d0f9ee2e0b8a\" (UID: \"70a286c7-98b0-42c3-badb-d0f9ee2e0b8a\") " Dec 03 09:45:04 crc kubenswrapper[4576]: I1203 09:45:04.956986 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vk7m5\" (UniqueName: \"kubernetes.io/projected/70a286c7-98b0-42c3-badb-d0f9ee2e0b8a-kube-api-access-vk7m5\") pod \"70a286c7-98b0-42c3-badb-d0f9ee2e0b8a\" (UID: \"70a286c7-98b0-42c3-badb-d0f9ee2e0b8a\") " Dec 03 09:45:04 crc kubenswrapper[4576]: I1203 09:45:04.957203 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/70a286c7-98b0-42c3-badb-d0f9ee2e0b8a-secret-volume\") pod \"70a286c7-98b0-42c3-badb-d0f9ee2e0b8a\" (UID: \"70a286c7-98b0-42c3-badb-d0f9ee2e0b8a\") " Dec 03 09:45:04 crc kubenswrapper[4576]: I1203 09:45:04.957830 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/70a286c7-98b0-42c3-badb-d0f9ee2e0b8a-config-volume" (OuterVolumeSpecName: "config-volume") pod "70a286c7-98b0-42c3-badb-d0f9ee2e0b8a" (UID: "70a286c7-98b0-42c3-badb-d0f9ee2e0b8a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 09:45:04 crc kubenswrapper[4576]: I1203 09:45:04.958190 4576 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/70a286c7-98b0-42c3-badb-d0f9ee2e0b8a-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 09:45:04 crc kubenswrapper[4576]: I1203 09:45:04.974275 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70a286c7-98b0-42c3-badb-d0f9ee2e0b8a-kube-api-access-vk7m5" (OuterVolumeSpecName: "kube-api-access-vk7m5") pod "70a286c7-98b0-42c3-badb-d0f9ee2e0b8a" (UID: "70a286c7-98b0-42c3-badb-d0f9ee2e0b8a"). InnerVolumeSpecName "kube-api-access-vk7m5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:45:04 crc kubenswrapper[4576]: I1203 09:45:04.977863 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70a286c7-98b0-42c3-badb-d0f9ee2e0b8a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "70a286c7-98b0-42c3-badb-d0f9ee2e0b8a" (UID: "70a286c7-98b0-42c3-badb-d0f9ee2e0b8a"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 09:45:05 crc kubenswrapper[4576]: I1203 09:45:05.030027 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412540-hchls"] Dec 03 09:45:05 crc kubenswrapper[4576]: I1203 09:45:05.043363 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412540-hchls"] Dec 03 09:45:05 crc kubenswrapper[4576]: I1203 09:45:05.060369 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vk7m5\" (UniqueName: \"kubernetes.io/projected/70a286c7-98b0-42c3-badb-d0f9ee2e0b8a-kube-api-access-vk7m5\") on node \"crc\" DevicePath \"\"" Dec 03 09:45:05 crc kubenswrapper[4576]: I1203 09:45:05.060410 4576 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/70a286c7-98b0-42c3-badb-d0f9ee2e0b8a-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 09:45:05 crc kubenswrapper[4576]: I1203 09:45:05.362553 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412585-mfmzh" event={"ID":"70a286c7-98b0-42c3-badb-d0f9ee2e0b8a","Type":"ContainerDied","Data":"deb7447c1e7cb83229662019aad79a06a2bfaf64f8ee56a439be315e733d01ee"} Dec 03 09:45:05 crc kubenswrapper[4576]: I1203 09:45:05.362817 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="deb7447c1e7cb83229662019aad79a06a2bfaf64f8ee56a439be315e733d01ee" Dec 03 09:45:05 crc kubenswrapper[4576]: I1203 09:45:05.362616 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412585-mfmzh" Dec 03 09:45:05 crc kubenswrapper[4576]: I1203 09:45:05.719354 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7234af20-614a-4fe5-adbb-68515a638bf9" path="/var/lib/kubelet/pods/7234af20-614a-4fe5-adbb-68515a638bf9/volumes" Dec 03 09:45:06 crc kubenswrapper[4576]: I1203 09:45:06.677140 4576 scope.go:117] "RemoveContainer" containerID="203ce6e7360eeb498dd9ac293ee7cc7c5ecc0d5e931b63f02d758470199459d2" Dec 03 09:45:06 crc kubenswrapper[4576]: E1203 09:45:06.677724 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:45:21 crc kubenswrapper[4576]: I1203 09:45:21.678665 4576 scope.go:117] "RemoveContainer" containerID="203ce6e7360eeb498dd9ac293ee7cc7c5ecc0d5e931b63f02d758470199459d2" Dec 03 09:45:21 crc kubenswrapper[4576]: E1203 09:45:21.679434 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:45:32 crc kubenswrapper[4576]: I1203 09:45:32.677944 4576 scope.go:117] "RemoveContainer" containerID="203ce6e7360eeb498dd9ac293ee7cc7c5ecc0d5e931b63f02d758470199459d2" Dec 03 09:45:32 
crc kubenswrapper[4576]: E1203 09:45:32.678986 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:45:46 crc kubenswrapper[4576]: I1203 09:45:46.678316 4576 scope.go:117] "RemoveContainer" containerID="203ce6e7360eeb498dd9ac293ee7cc7c5ecc0d5e931b63f02d758470199459d2" Dec 03 09:45:46 crc kubenswrapper[4576]: E1203 09:45:46.680474 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:45:57 crc kubenswrapper[4576]: I1203 09:45:57.347091 4576 scope.go:117] "RemoveContainer" containerID="b31f24842a68cba54f0597872edb8d9c492f7cd6fc0c3b867eb9898f16c3171b" Dec 03 09:46:00 crc kubenswrapper[4576]: I1203 09:46:00.677596 4576 scope.go:117] "RemoveContainer" containerID="203ce6e7360eeb498dd9ac293ee7cc7c5ecc0d5e931b63f02d758470199459d2" Dec 03 09:46:00 crc kubenswrapper[4576]: E1203 09:46:00.678566 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:46:15 crc kubenswrapper[4576]: I1203 09:46:15.677943 4576 scope.go:117] "RemoveContainer" containerID="203ce6e7360eeb498dd9ac293ee7cc7c5ecc0d5e931b63f02d758470199459d2" Dec 03 09:46:15 crc kubenswrapper[4576]: E1203 09:46:15.680110 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:46:30 crc kubenswrapper[4576]: I1203 09:46:30.676872 4576 scope.go:117] "RemoveContainer" containerID="203ce6e7360eeb498dd9ac293ee7cc7c5ecc0d5e931b63f02d758470199459d2" Dec 03 09:46:30 crc kubenswrapper[4576]: E1203 09:46:30.678235 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:46:42 crc kubenswrapper[4576]: I1203 09:46:42.677892 4576 scope.go:117] "RemoveContainer" containerID="203ce6e7360eeb498dd9ac293ee7cc7c5ecc0d5e931b63f02d758470199459d2" Dec 03 09:46:42 crc 
kubenswrapper[4576]: E1203 09:46:42.678725 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:46:55 crc kubenswrapper[4576]: I1203 09:46:55.678445 4576 scope.go:117] "RemoveContainer" containerID="203ce6e7360eeb498dd9ac293ee7cc7c5ecc0d5e931b63f02d758470199459d2" Dec 03 09:46:55 crc kubenswrapper[4576]: E1203 09:46:55.679292 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:47:07 crc kubenswrapper[4576]: I1203 09:47:07.047934 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-5cxvq"] Dec 03 09:47:07 crc kubenswrapper[4576]: E1203 09:47:07.049951 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70a286c7-98b0-42c3-badb-d0f9ee2e0b8a" containerName="collect-profiles" Dec 03 09:47:07 crc kubenswrapper[4576]: I1203 09:47:07.049975 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="70a286c7-98b0-42c3-badb-d0f9ee2e0b8a" containerName="collect-profiles" Dec 03 09:47:07 crc kubenswrapper[4576]: I1203 09:47:07.050270 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="70a286c7-98b0-42c3-badb-d0f9ee2e0b8a" containerName="collect-profiles" Dec 03 09:47:07 crc kubenswrapper[4576]: I1203 09:47:07.053809 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5cxvq" Dec 03 09:47:07 crc kubenswrapper[4576]: I1203 09:47:07.060377 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5cxvq"] Dec 03 09:47:07 crc kubenswrapper[4576]: I1203 09:47:07.218720 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a02513e1-b3e7-4517-a1d2-79efb6156b4e-utilities\") pod \"redhat-marketplace-5cxvq\" (UID: \"a02513e1-b3e7-4517-a1d2-79efb6156b4e\") " pod="openshift-marketplace/redhat-marketplace-5cxvq" Dec 03 09:47:07 crc kubenswrapper[4576]: I1203 09:47:07.218853 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a02513e1-b3e7-4517-a1d2-79efb6156b4e-catalog-content\") pod \"redhat-marketplace-5cxvq\" (UID: \"a02513e1-b3e7-4517-a1d2-79efb6156b4e\") " pod="openshift-marketplace/redhat-marketplace-5cxvq" Dec 03 09:47:07 crc kubenswrapper[4576]: I1203 09:47:07.218909 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6cchq\" (UniqueName: \"kubernetes.io/projected/a02513e1-b3e7-4517-a1d2-79efb6156b4e-kube-api-access-6cchq\") pod \"redhat-marketplace-5cxvq\" (UID: \"a02513e1-b3e7-4517-a1d2-79efb6156b4e\") " pod="openshift-marketplace/redhat-marketplace-5cxvq" Dec 03 09:47:07 crc kubenswrapper[4576]: I1203 09:47:07.321118 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a02513e1-b3e7-4517-a1d2-79efb6156b4e-utilities\") pod \"redhat-marketplace-5cxvq\" (UID: \"a02513e1-b3e7-4517-a1d2-79efb6156b4e\") " pod="openshift-marketplace/redhat-marketplace-5cxvq" Dec 03 09:47:07 crc kubenswrapper[4576]: I1203 09:47:07.321235 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a02513e1-b3e7-4517-a1d2-79efb6156b4e-catalog-content\") pod \"redhat-marketplace-5cxvq\" (UID: \"a02513e1-b3e7-4517-a1d2-79efb6156b4e\") " pod="openshift-marketplace/redhat-marketplace-5cxvq" Dec 03 09:47:07 crc kubenswrapper[4576]: I1203 09:47:07.321289 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6cchq\" (UniqueName: \"kubernetes.io/projected/a02513e1-b3e7-4517-a1d2-79efb6156b4e-kube-api-access-6cchq\") pod \"redhat-marketplace-5cxvq\" (UID: \"a02513e1-b3e7-4517-a1d2-79efb6156b4e\") " pod="openshift-marketplace/redhat-marketplace-5cxvq" Dec 03 09:47:07 crc kubenswrapper[4576]: I1203 09:47:07.321698 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a02513e1-b3e7-4517-a1d2-79efb6156b4e-utilities\") pod \"redhat-marketplace-5cxvq\" (UID: \"a02513e1-b3e7-4517-a1d2-79efb6156b4e\") " pod="openshift-marketplace/redhat-marketplace-5cxvq" Dec 03 09:47:07 crc kubenswrapper[4576]: I1203 09:47:07.321807 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a02513e1-b3e7-4517-a1d2-79efb6156b4e-catalog-content\") pod \"redhat-marketplace-5cxvq\" (UID: \"a02513e1-b3e7-4517-a1d2-79efb6156b4e\") " pod="openshift-marketplace/redhat-marketplace-5cxvq" Dec 03 09:47:07 crc kubenswrapper[4576]: I1203 09:47:07.343629 4576 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-6cchq\" (UniqueName: \"kubernetes.io/projected/a02513e1-b3e7-4517-a1d2-79efb6156b4e-kube-api-access-6cchq\") pod \"redhat-marketplace-5cxvq\" (UID: \"a02513e1-b3e7-4517-a1d2-79efb6156b4e\") " pod="openshift-marketplace/redhat-marketplace-5cxvq" Dec 03 09:47:07 crc kubenswrapper[4576]: I1203 09:47:07.376621 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5cxvq" Dec 03 09:47:07 crc kubenswrapper[4576]: I1203 09:47:07.929903 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5cxvq"] Dec 03 09:47:07 crc kubenswrapper[4576]: I1203 09:47:07.990732 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5cxvq" event={"ID":"a02513e1-b3e7-4517-a1d2-79efb6156b4e","Type":"ContainerStarted","Data":"b29233ff1471c589e6f718c2354905204a49500e202e3ef511b7967be13fc9b3"} Dec 03 09:47:09 crc kubenswrapper[4576]: I1203 09:47:09.000958 4576 generic.go:334] "Generic (PLEG): container finished" podID="a02513e1-b3e7-4517-a1d2-79efb6156b4e" containerID="eb045fb3d9108758cd275f373ef786829fc43826afdeaf63ca69eaceecb0cbce" exitCode=0 Dec 03 09:47:09 crc kubenswrapper[4576]: I1203 09:47:09.001050 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5cxvq" event={"ID":"a02513e1-b3e7-4517-a1d2-79efb6156b4e","Type":"ContainerDied","Data":"eb045fb3d9108758cd275f373ef786829fc43826afdeaf63ca69eaceecb0cbce"} Dec 03 09:47:10 crc kubenswrapper[4576]: I1203 09:47:10.014244 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5cxvq" event={"ID":"a02513e1-b3e7-4517-a1d2-79efb6156b4e","Type":"ContainerStarted","Data":"a6b6faba9a966e4c1e86a082a1aad0a1084ef99099ad4ef4f3b1830f16197d34"} Dec 03 09:47:10 crc kubenswrapper[4576]: I1203 09:47:10.677180 4576 scope.go:117] "RemoveContainer" containerID="203ce6e7360eeb498dd9ac293ee7cc7c5ecc0d5e931b63f02d758470199459d2" Dec 03 09:47:11 crc kubenswrapper[4576]: I1203 09:47:11.025727 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" event={"ID":"60b1bede-26e9-4b5d-b450-9866da685693","Type":"ContainerStarted","Data":"d61e9cb3c6077d03c4ea98ab7e87c2446a37e6e7aa4300bd57c4f1d62ebaf7cc"} Dec 03 09:47:11 crc kubenswrapper[4576]: I1203 09:47:11.027647 4576 generic.go:334] "Generic (PLEG): container finished" podID="a02513e1-b3e7-4517-a1d2-79efb6156b4e" containerID="a6b6faba9a966e4c1e86a082a1aad0a1084ef99099ad4ef4f3b1830f16197d34" exitCode=0 Dec 03 09:47:11 crc kubenswrapper[4576]: I1203 09:47:11.027682 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5cxvq" event={"ID":"a02513e1-b3e7-4517-a1d2-79efb6156b4e","Type":"ContainerDied","Data":"a6b6faba9a966e4c1e86a082a1aad0a1084ef99099ad4ef4f3b1830f16197d34"} Dec 03 09:47:12 crc kubenswrapper[4576]: I1203 09:47:12.048949 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5cxvq" event={"ID":"a02513e1-b3e7-4517-a1d2-79efb6156b4e","Type":"ContainerStarted","Data":"cb1be3e5d86271c32cd93f5c640bae266fe7b51f2788869b0e4d045bacb380ee"} Dec 03 09:47:12 crc kubenswrapper[4576]: I1203 09:47:12.082859 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-5cxvq" podStartSLOduration=2.521796143 podStartE2EDuration="5.082829068s" 
podCreationTimestamp="2025-12-03 09:47:07 +0000 UTC" firstStartedPulling="2025-12-03 09:47:09.004095049 +0000 UTC m=+4036.390072033" lastFinishedPulling="2025-12-03 09:47:11.565127974 +0000 UTC m=+4038.951104958" observedRunningTime="2025-12-03 09:47:12.076415945 +0000 UTC m=+4039.462392929" watchObservedRunningTime="2025-12-03 09:47:12.082829068 +0000 UTC m=+4039.468806052" Dec 03 09:47:12 crc kubenswrapper[4576]: I1203 09:47:12.829336 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-hdvkp"] Dec 03 09:47:12 crc kubenswrapper[4576]: I1203 09:47:12.835767 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hdvkp" Dec 03 09:47:12 crc kubenswrapper[4576]: I1203 09:47:12.840191 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hdvkp"] Dec 03 09:47:12 crc kubenswrapper[4576]: I1203 09:47:12.973941 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f4606f7-a68b-4535-acd0-74e4801ece24-catalog-content\") pod \"community-operators-hdvkp\" (UID: \"9f4606f7-a68b-4535-acd0-74e4801ece24\") " pod="openshift-marketplace/community-operators-hdvkp" Dec 03 09:47:12 crc kubenswrapper[4576]: I1203 09:47:12.974051 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f4606f7-a68b-4535-acd0-74e4801ece24-utilities\") pod \"community-operators-hdvkp\" (UID: \"9f4606f7-a68b-4535-acd0-74e4801ece24\") " pod="openshift-marketplace/community-operators-hdvkp" Dec 03 09:47:12 crc kubenswrapper[4576]: I1203 09:47:12.974132 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p9d2v\" (UniqueName: \"kubernetes.io/projected/9f4606f7-a68b-4535-acd0-74e4801ece24-kube-api-access-p9d2v\") pod \"community-operators-hdvkp\" (UID: \"9f4606f7-a68b-4535-acd0-74e4801ece24\") " pod="openshift-marketplace/community-operators-hdvkp" Dec 03 09:47:13 crc kubenswrapper[4576]: I1203 09:47:13.075865 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f4606f7-a68b-4535-acd0-74e4801ece24-catalog-content\") pod \"community-operators-hdvkp\" (UID: \"9f4606f7-a68b-4535-acd0-74e4801ece24\") " pod="openshift-marketplace/community-operators-hdvkp" Dec 03 09:47:13 crc kubenswrapper[4576]: I1203 09:47:13.076157 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f4606f7-a68b-4535-acd0-74e4801ece24-utilities\") pod \"community-operators-hdvkp\" (UID: \"9f4606f7-a68b-4535-acd0-74e4801ece24\") " pod="openshift-marketplace/community-operators-hdvkp" Dec 03 09:47:13 crc kubenswrapper[4576]: I1203 09:47:13.076282 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p9d2v\" (UniqueName: \"kubernetes.io/projected/9f4606f7-a68b-4535-acd0-74e4801ece24-kube-api-access-p9d2v\") pod \"community-operators-hdvkp\" (UID: \"9f4606f7-a68b-4535-acd0-74e4801ece24\") " pod="openshift-marketplace/community-operators-hdvkp" Dec 03 09:47:13 crc kubenswrapper[4576]: I1203 09:47:13.076575 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/9f4606f7-a68b-4535-acd0-74e4801ece24-catalog-content\") pod \"community-operators-hdvkp\" (UID: \"9f4606f7-a68b-4535-acd0-74e4801ece24\") " pod="openshift-marketplace/community-operators-hdvkp" Dec 03 09:47:13 crc kubenswrapper[4576]: I1203 09:47:13.076750 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f4606f7-a68b-4535-acd0-74e4801ece24-utilities\") pod \"community-operators-hdvkp\" (UID: \"9f4606f7-a68b-4535-acd0-74e4801ece24\") " pod="openshift-marketplace/community-operators-hdvkp" Dec 03 09:47:13 crc kubenswrapper[4576]: I1203 09:47:13.110808 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p9d2v\" (UniqueName: \"kubernetes.io/projected/9f4606f7-a68b-4535-acd0-74e4801ece24-kube-api-access-p9d2v\") pod \"community-operators-hdvkp\" (UID: \"9f4606f7-a68b-4535-acd0-74e4801ece24\") " pod="openshift-marketplace/community-operators-hdvkp" Dec 03 09:47:13 crc kubenswrapper[4576]: I1203 09:47:13.160666 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hdvkp" Dec 03 09:47:14 crc kubenswrapper[4576]: I1203 09:47:14.324224 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hdvkp"] Dec 03 09:47:14 crc kubenswrapper[4576]: W1203 09:47:14.335037 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9f4606f7_a68b_4535_acd0_74e4801ece24.slice/crio-d1a18221804e2eab1676fcf89a121523c2cd02fd68d137fe785abf97db7e8c74 WatchSource:0}: Error finding container d1a18221804e2eab1676fcf89a121523c2cd02fd68d137fe785abf97db7e8c74: Status 404 returned error can't find the container with id d1a18221804e2eab1676fcf89a121523c2cd02fd68d137fe785abf97db7e8c74 Dec 03 09:47:15 crc kubenswrapper[4576]: I1203 09:47:15.151054 4576 generic.go:334] "Generic (PLEG): container finished" podID="9f4606f7-a68b-4535-acd0-74e4801ece24" containerID="a57e070d4afda0840d4007d733598ed7e8e2a26e40b4ca615c1fac9112ea4204" exitCode=0 Dec 03 09:47:15 crc kubenswrapper[4576]: I1203 09:47:15.151276 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hdvkp" event={"ID":"9f4606f7-a68b-4535-acd0-74e4801ece24","Type":"ContainerDied","Data":"a57e070d4afda0840d4007d733598ed7e8e2a26e40b4ca615c1fac9112ea4204"} Dec 03 09:47:15 crc kubenswrapper[4576]: I1203 09:47:15.151318 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hdvkp" event={"ID":"9f4606f7-a68b-4535-acd0-74e4801ece24","Type":"ContainerStarted","Data":"d1a18221804e2eab1676fcf89a121523c2cd02fd68d137fe785abf97db7e8c74"} Dec 03 09:47:17 crc kubenswrapper[4576]: I1203 09:47:17.215333 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hdvkp" event={"ID":"9f4606f7-a68b-4535-acd0-74e4801ece24","Type":"ContainerStarted","Data":"aa2e7186daf18ce34bd34ad0420b772292b113ba8d44e5eab721b82460a766c0"} Dec 03 09:47:17 crc kubenswrapper[4576]: I1203 09:47:17.402604 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-5cxvq" Dec 03 09:47:17 crc kubenswrapper[4576]: I1203 09:47:17.403914 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-5cxvq" Dec 03 09:47:18 crc kubenswrapper[4576]: I1203 
09:47:18.227126 4576 generic.go:334] "Generic (PLEG): container finished" podID="9f4606f7-a68b-4535-acd0-74e4801ece24" containerID="aa2e7186daf18ce34bd34ad0420b772292b113ba8d44e5eab721b82460a766c0" exitCode=0 Dec 03 09:47:18 crc kubenswrapper[4576]: I1203 09:47:18.227707 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hdvkp" event={"ID":"9f4606f7-a68b-4535-acd0-74e4801ece24","Type":"ContainerDied","Data":"aa2e7186daf18ce34bd34ad0420b772292b113ba8d44e5eab721b82460a766c0"} Dec 03 09:47:18 crc kubenswrapper[4576]: I1203 09:47:18.552336 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-5cxvq" podUID="a02513e1-b3e7-4517-a1d2-79efb6156b4e" containerName="registry-server" probeResult="failure" output=< Dec 03 09:47:18 crc kubenswrapper[4576]: timeout: failed to connect service ":50051" within 1s Dec 03 09:47:18 crc kubenswrapper[4576]: > Dec 03 09:47:19 crc kubenswrapper[4576]: I1203 09:47:19.254777 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hdvkp" event={"ID":"9f4606f7-a68b-4535-acd0-74e4801ece24","Type":"ContainerStarted","Data":"97ec90c9382d0af35da9b0353bad5c5831092755edc599ae1f955634decc80c9"} Dec 03 09:47:19 crc kubenswrapper[4576]: I1203 09:47:19.295141 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-hdvkp" podStartSLOduration=3.678090873 podStartE2EDuration="7.295120416s" podCreationTimestamp="2025-12-03 09:47:12 +0000 UTC" firstStartedPulling="2025-12-03 09:47:15.153334376 +0000 UTC m=+4042.539311360" lastFinishedPulling="2025-12-03 09:47:18.770363919 +0000 UTC m=+4046.156340903" observedRunningTime="2025-12-03 09:47:19.280718196 +0000 UTC m=+4046.666695190" watchObservedRunningTime="2025-12-03 09:47:19.295120416 +0000 UTC m=+4046.681097400" Dec 03 09:47:23 crc kubenswrapper[4576]: I1203 09:47:23.161180 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-hdvkp" Dec 03 09:47:23 crc kubenswrapper[4576]: I1203 09:47:23.161854 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-hdvkp" Dec 03 09:47:24 crc kubenswrapper[4576]: I1203 09:47:24.232826 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-hdvkp" podUID="9f4606f7-a68b-4535-acd0-74e4801ece24" containerName="registry-server" probeResult="failure" output=< Dec 03 09:47:24 crc kubenswrapper[4576]: timeout: failed to connect service ":50051" within 1s Dec 03 09:47:24 crc kubenswrapper[4576]: > Dec 03 09:47:27 crc kubenswrapper[4576]: I1203 09:47:27.427708 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-5cxvq" Dec 03 09:47:27 crc kubenswrapper[4576]: I1203 09:47:27.482368 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-5cxvq" Dec 03 09:47:27 crc kubenswrapper[4576]: I1203 09:47:27.694996 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5cxvq"] Dec 03 09:47:29 crc kubenswrapper[4576]: I1203 09:47:29.360069 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-5cxvq" podUID="a02513e1-b3e7-4517-a1d2-79efb6156b4e" containerName="registry-server" 
containerID="cri-o://cb1be3e5d86271c32cd93f5c640bae266fe7b51f2788869b0e4d045bacb380ee" gracePeriod=2 Dec 03 09:47:30 crc kubenswrapper[4576]: I1203 09:47:30.370278 4576 generic.go:334] "Generic (PLEG): container finished" podID="a02513e1-b3e7-4517-a1d2-79efb6156b4e" containerID="cb1be3e5d86271c32cd93f5c640bae266fe7b51f2788869b0e4d045bacb380ee" exitCode=0 Dec 03 09:47:30 crc kubenswrapper[4576]: I1203 09:47:30.370398 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5cxvq" event={"ID":"a02513e1-b3e7-4517-a1d2-79efb6156b4e","Type":"ContainerDied","Data":"cb1be3e5d86271c32cd93f5c640bae266fe7b51f2788869b0e4d045bacb380ee"} Dec 03 09:47:31 crc kubenswrapper[4576]: I1203 09:47:30.806991 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5cxvq" Dec 03 09:47:31 crc kubenswrapper[4576]: I1203 09:47:30.869629 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a02513e1-b3e7-4517-a1d2-79efb6156b4e-utilities\") pod \"a02513e1-b3e7-4517-a1d2-79efb6156b4e\" (UID: \"a02513e1-b3e7-4517-a1d2-79efb6156b4e\") " Dec 03 09:47:31 crc kubenswrapper[4576]: I1203 09:47:30.869853 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6cchq\" (UniqueName: \"kubernetes.io/projected/a02513e1-b3e7-4517-a1d2-79efb6156b4e-kube-api-access-6cchq\") pod \"a02513e1-b3e7-4517-a1d2-79efb6156b4e\" (UID: \"a02513e1-b3e7-4517-a1d2-79efb6156b4e\") " Dec 03 09:47:31 crc kubenswrapper[4576]: I1203 09:47:30.869981 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a02513e1-b3e7-4517-a1d2-79efb6156b4e-catalog-content\") pod \"a02513e1-b3e7-4517-a1d2-79efb6156b4e\" (UID: \"a02513e1-b3e7-4517-a1d2-79efb6156b4e\") " Dec 03 09:47:31 crc kubenswrapper[4576]: I1203 09:47:30.874613 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a02513e1-b3e7-4517-a1d2-79efb6156b4e-utilities" (OuterVolumeSpecName: "utilities") pod "a02513e1-b3e7-4517-a1d2-79efb6156b4e" (UID: "a02513e1-b3e7-4517-a1d2-79efb6156b4e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:47:31 crc kubenswrapper[4576]: I1203 09:47:30.902901 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a02513e1-b3e7-4517-a1d2-79efb6156b4e-kube-api-access-6cchq" (OuterVolumeSpecName: "kube-api-access-6cchq") pod "a02513e1-b3e7-4517-a1d2-79efb6156b4e" (UID: "a02513e1-b3e7-4517-a1d2-79efb6156b4e"). InnerVolumeSpecName "kube-api-access-6cchq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:47:31 crc kubenswrapper[4576]: I1203 09:47:30.928632 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a02513e1-b3e7-4517-a1d2-79efb6156b4e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a02513e1-b3e7-4517-a1d2-79efb6156b4e" (UID: "a02513e1-b3e7-4517-a1d2-79efb6156b4e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:47:31 crc kubenswrapper[4576]: I1203 09:47:30.973702 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a02513e1-b3e7-4517-a1d2-79efb6156b4e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 09:47:31 crc kubenswrapper[4576]: I1203 09:47:30.973733 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a02513e1-b3e7-4517-a1d2-79efb6156b4e-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 09:47:31 crc kubenswrapper[4576]: I1203 09:47:30.973742 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6cchq\" (UniqueName: \"kubernetes.io/projected/a02513e1-b3e7-4517-a1d2-79efb6156b4e-kube-api-access-6cchq\") on node \"crc\" DevicePath \"\"" Dec 03 09:47:31 crc kubenswrapper[4576]: I1203 09:47:31.382297 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5cxvq" event={"ID":"a02513e1-b3e7-4517-a1d2-79efb6156b4e","Type":"ContainerDied","Data":"b29233ff1471c589e6f718c2354905204a49500e202e3ef511b7967be13fc9b3"} Dec 03 09:47:31 crc kubenswrapper[4576]: I1203 09:47:31.382348 4576 scope.go:117] "RemoveContainer" containerID="cb1be3e5d86271c32cd93f5c640bae266fe7b51f2788869b0e4d045bacb380ee" Dec 03 09:47:31 crc kubenswrapper[4576]: I1203 09:47:31.382467 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5cxvq" Dec 03 09:47:31 crc kubenswrapper[4576]: I1203 09:47:31.448094 4576 scope.go:117] "RemoveContainer" containerID="a6b6faba9a966e4c1e86a082a1aad0a1084ef99099ad4ef4f3b1830f16197d34" Dec 03 09:47:31 crc kubenswrapper[4576]: I1203 09:47:31.454822 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5cxvq"] Dec 03 09:47:31 crc kubenswrapper[4576]: I1203 09:47:31.478706 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-5cxvq"] Dec 03 09:47:31 crc kubenswrapper[4576]: I1203 09:47:31.488142 4576 scope.go:117] "RemoveContainer" containerID="eb045fb3d9108758cd275f373ef786829fc43826afdeaf63ca69eaceecb0cbce" Dec 03 09:47:31 crc kubenswrapper[4576]: I1203 09:47:31.691116 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a02513e1-b3e7-4517-a1d2-79efb6156b4e" path="/var/lib/kubelet/pods/a02513e1-b3e7-4517-a1d2-79efb6156b4e/volumes" Dec 03 09:47:33 crc kubenswrapper[4576]: I1203 09:47:33.225602 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-hdvkp" Dec 03 09:47:33 crc kubenswrapper[4576]: I1203 09:47:33.294174 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-hdvkp" Dec 03 09:47:34 crc kubenswrapper[4576]: I1203 09:47:34.076036 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hdvkp"] Dec 03 09:47:34 crc kubenswrapper[4576]: I1203 09:47:34.413077 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-hdvkp" podUID="9f4606f7-a68b-4535-acd0-74e4801ece24" containerName="registry-server" containerID="cri-o://97ec90c9382d0af35da9b0353bad5c5831092755edc599ae1f955634decc80c9" gracePeriod=2 Dec 03 09:47:35 crc kubenswrapper[4576]: I1203 09:47:35.327687 4576 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="openshift-marketplace/community-operators-hdvkp" Dec 03 09:47:35 crc kubenswrapper[4576]: I1203 09:47:35.369182 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f4606f7-a68b-4535-acd0-74e4801ece24-catalog-content\") pod \"9f4606f7-a68b-4535-acd0-74e4801ece24\" (UID: \"9f4606f7-a68b-4535-acd0-74e4801ece24\") " Dec 03 09:47:35 crc kubenswrapper[4576]: I1203 09:47:35.369301 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p9d2v\" (UniqueName: \"kubernetes.io/projected/9f4606f7-a68b-4535-acd0-74e4801ece24-kube-api-access-p9d2v\") pod \"9f4606f7-a68b-4535-acd0-74e4801ece24\" (UID: \"9f4606f7-a68b-4535-acd0-74e4801ece24\") " Dec 03 09:47:35 crc kubenswrapper[4576]: I1203 09:47:35.369357 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f4606f7-a68b-4535-acd0-74e4801ece24-utilities\") pod \"9f4606f7-a68b-4535-acd0-74e4801ece24\" (UID: \"9f4606f7-a68b-4535-acd0-74e4801ece24\") " Dec 03 09:47:35 crc kubenswrapper[4576]: I1203 09:47:35.370590 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9f4606f7-a68b-4535-acd0-74e4801ece24-utilities" (OuterVolumeSpecName: "utilities") pod "9f4606f7-a68b-4535-acd0-74e4801ece24" (UID: "9f4606f7-a68b-4535-acd0-74e4801ece24"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:47:35 crc kubenswrapper[4576]: I1203 09:47:35.390963 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f4606f7-a68b-4535-acd0-74e4801ece24-kube-api-access-p9d2v" (OuterVolumeSpecName: "kube-api-access-p9d2v") pod "9f4606f7-a68b-4535-acd0-74e4801ece24" (UID: "9f4606f7-a68b-4535-acd0-74e4801ece24"). InnerVolumeSpecName "kube-api-access-p9d2v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:47:35 crc kubenswrapper[4576]: I1203 09:47:35.429911 4576 generic.go:334] "Generic (PLEG): container finished" podID="9f4606f7-a68b-4535-acd0-74e4801ece24" containerID="97ec90c9382d0af35da9b0353bad5c5831092755edc599ae1f955634decc80c9" exitCode=0 Dec 03 09:47:35 crc kubenswrapper[4576]: I1203 09:47:35.429953 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hdvkp" event={"ID":"9f4606f7-a68b-4535-acd0-74e4801ece24","Type":"ContainerDied","Data":"97ec90c9382d0af35da9b0353bad5c5831092755edc599ae1f955634decc80c9"} Dec 03 09:47:35 crc kubenswrapper[4576]: I1203 09:47:35.429992 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hdvkp" event={"ID":"9f4606f7-a68b-4535-acd0-74e4801ece24","Type":"ContainerDied","Data":"d1a18221804e2eab1676fcf89a121523c2cd02fd68d137fe785abf97db7e8c74"} Dec 03 09:47:35 crc kubenswrapper[4576]: I1203 09:47:35.430011 4576 scope.go:117] "RemoveContainer" containerID="97ec90c9382d0af35da9b0353bad5c5831092755edc599ae1f955634decc80c9" Dec 03 09:47:35 crc kubenswrapper[4576]: I1203 09:47:35.430173 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hdvkp" Dec 03 09:47:35 crc kubenswrapper[4576]: I1203 09:47:35.440054 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9f4606f7-a68b-4535-acd0-74e4801ece24-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9f4606f7-a68b-4535-acd0-74e4801ece24" (UID: "9f4606f7-a68b-4535-acd0-74e4801ece24"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:47:35 crc kubenswrapper[4576]: I1203 09:47:35.466678 4576 scope.go:117] "RemoveContainer" containerID="aa2e7186daf18ce34bd34ad0420b772292b113ba8d44e5eab721b82460a766c0" Dec 03 09:47:35 crc kubenswrapper[4576]: I1203 09:47:35.471732 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f4606f7-a68b-4535-acd0-74e4801ece24-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 09:47:35 crc kubenswrapper[4576]: I1203 09:47:35.471772 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p9d2v\" (UniqueName: \"kubernetes.io/projected/9f4606f7-a68b-4535-acd0-74e4801ece24-kube-api-access-p9d2v\") on node \"crc\" DevicePath \"\"" Dec 03 09:47:35 crc kubenswrapper[4576]: I1203 09:47:35.471818 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f4606f7-a68b-4535-acd0-74e4801ece24-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 09:47:35 crc kubenswrapper[4576]: I1203 09:47:35.498936 4576 scope.go:117] "RemoveContainer" containerID="a57e070d4afda0840d4007d733598ed7e8e2a26e40b4ca615c1fac9112ea4204" Dec 03 09:47:35 crc kubenswrapper[4576]: I1203 09:47:35.547057 4576 scope.go:117] "RemoveContainer" containerID="97ec90c9382d0af35da9b0353bad5c5831092755edc599ae1f955634decc80c9" Dec 03 09:47:35 crc kubenswrapper[4576]: E1203 09:47:35.569990 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"97ec90c9382d0af35da9b0353bad5c5831092755edc599ae1f955634decc80c9\": container with ID starting with 97ec90c9382d0af35da9b0353bad5c5831092755edc599ae1f955634decc80c9 not found: ID does not exist" containerID="97ec90c9382d0af35da9b0353bad5c5831092755edc599ae1f955634decc80c9" Dec 03 09:47:35 crc kubenswrapper[4576]: I1203 09:47:35.570051 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97ec90c9382d0af35da9b0353bad5c5831092755edc599ae1f955634decc80c9"} err="failed to get container status \"97ec90c9382d0af35da9b0353bad5c5831092755edc599ae1f955634decc80c9\": rpc error: code = NotFound desc = could not find container \"97ec90c9382d0af35da9b0353bad5c5831092755edc599ae1f955634decc80c9\": container with ID starting with 97ec90c9382d0af35da9b0353bad5c5831092755edc599ae1f955634decc80c9 not found: ID does not exist" Dec 03 09:47:35 crc kubenswrapper[4576]: I1203 09:47:35.570085 4576 scope.go:117] "RemoveContainer" containerID="aa2e7186daf18ce34bd34ad0420b772292b113ba8d44e5eab721b82460a766c0" Dec 03 09:47:35 crc kubenswrapper[4576]: E1203 09:47:35.570453 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa2e7186daf18ce34bd34ad0420b772292b113ba8d44e5eab721b82460a766c0\": container with ID starting with aa2e7186daf18ce34bd34ad0420b772292b113ba8d44e5eab721b82460a766c0 not found: ID does not exist" 
containerID="aa2e7186daf18ce34bd34ad0420b772292b113ba8d44e5eab721b82460a766c0" Dec 03 09:47:35 crc kubenswrapper[4576]: I1203 09:47:35.570504 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa2e7186daf18ce34bd34ad0420b772292b113ba8d44e5eab721b82460a766c0"} err="failed to get container status \"aa2e7186daf18ce34bd34ad0420b772292b113ba8d44e5eab721b82460a766c0\": rpc error: code = NotFound desc = could not find container \"aa2e7186daf18ce34bd34ad0420b772292b113ba8d44e5eab721b82460a766c0\": container with ID starting with aa2e7186daf18ce34bd34ad0420b772292b113ba8d44e5eab721b82460a766c0 not found: ID does not exist" Dec 03 09:47:35 crc kubenswrapper[4576]: I1203 09:47:35.570553 4576 scope.go:117] "RemoveContainer" containerID="a57e070d4afda0840d4007d733598ed7e8e2a26e40b4ca615c1fac9112ea4204" Dec 03 09:47:35 crc kubenswrapper[4576]: E1203 09:47:35.571009 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a57e070d4afda0840d4007d733598ed7e8e2a26e40b4ca615c1fac9112ea4204\": container with ID starting with a57e070d4afda0840d4007d733598ed7e8e2a26e40b4ca615c1fac9112ea4204 not found: ID does not exist" containerID="a57e070d4afda0840d4007d733598ed7e8e2a26e40b4ca615c1fac9112ea4204" Dec 03 09:47:35 crc kubenswrapper[4576]: I1203 09:47:35.571031 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a57e070d4afda0840d4007d733598ed7e8e2a26e40b4ca615c1fac9112ea4204"} err="failed to get container status \"a57e070d4afda0840d4007d733598ed7e8e2a26e40b4ca615c1fac9112ea4204\": rpc error: code = NotFound desc = could not find container \"a57e070d4afda0840d4007d733598ed7e8e2a26e40b4ca615c1fac9112ea4204\": container with ID starting with a57e070d4afda0840d4007d733598ed7e8e2a26e40b4ca615c1fac9112ea4204 not found: ID does not exist" Dec 03 09:47:35 crc kubenswrapper[4576]: I1203 09:47:35.754500 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hdvkp"] Dec 03 09:47:35 crc kubenswrapper[4576]: I1203 09:47:35.807541 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-hdvkp"] Dec 03 09:47:37 crc kubenswrapper[4576]: I1203 09:47:37.690971 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f4606f7-a68b-4535-acd0-74e4801ece24" path="/var/lib/kubelet/pods/9f4606f7-a68b-4535-acd0-74e4801ece24/volumes" Dec 03 09:49:17 crc kubenswrapper[4576]: I1203 09:49:17.314800 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-nl2dd"] Dec 03 09:49:17 crc kubenswrapper[4576]: E1203 09:49:17.315772 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a02513e1-b3e7-4517-a1d2-79efb6156b4e" containerName="extract-utilities" Dec 03 09:49:17 crc kubenswrapper[4576]: I1203 09:49:17.315789 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="a02513e1-b3e7-4517-a1d2-79efb6156b4e" containerName="extract-utilities" Dec 03 09:49:17 crc kubenswrapper[4576]: E1203 09:49:17.315801 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a02513e1-b3e7-4517-a1d2-79efb6156b4e" containerName="extract-content" Dec 03 09:49:17 crc kubenswrapper[4576]: I1203 09:49:17.315807 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="a02513e1-b3e7-4517-a1d2-79efb6156b4e" containerName="extract-content" Dec 03 09:49:17 crc kubenswrapper[4576]: E1203 09:49:17.315839 4576 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f4606f7-a68b-4535-acd0-74e4801ece24" containerName="extract-content" Dec 03 09:49:17 crc kubenswrapper[4576]: I1203 09:49:17.315846 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f4606f7-a68b-4535-acd0-74e4801ece24" containerName="extract-content" Dec 03 09:49:17 crc kubenswrapper[4576]: E1203 09:49:17.315858 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a02513e1-b3e7-4517-a1d2-79efb6156b4e" containerName="registry-server" Dec 03 09:49:17 crc kubenswrapper[4576]: I1203 09:49:17.315864 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="a02513e1-b3e7-4517-a1d2-79efb6156b4e" containerName="registry-server" Dec 03 09:49:17 crc kubenswrapper[4576]: E1203 09:49:17.315879 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f4606f7-a68b-4535-acd0-74e4801ece24" containerName="registry-server" Dec 03 09:49:17 crc kubenswrapper[4576]: I1203 09:49:17.315885 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f4606f7-a68b-4535-acd0-74e4801ece24" containerName="registry-server" Dec 03 09:49:17 crc kubenswrapper[4576]: E1203 09:49:17.315900 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f4606f7-a68b-4535-acd0-74e4801ece24" containerName="extract-utilities" Dec 03 09:49:17 crc kubenswrapper[4576]: I1203 09:49:17.315906 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f4606f7-a68b-4535-acd0-74e4801ece24" containerName="extract-utilities" Dec 03 09:49:17 crc kubenswrapper[4576]: I1203 09:49:17.316101 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="a02513e1-b3e7-4517-a1d2-79efb6156b4e" containerName="registry-server" Dec 03 09:49:17 crc kubenswrapper[4576]: I1203 09:49:17.316117 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f4606f7-a68b-4535-acd0-74e4801ece24" containerName="registry-server" Dec 03 09:49:17 crc kubenswrapper[4576]: I1203 09:49:17.322938 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nl2dd" Dec 03 09:49:17 crc kubenswrapper[4576]: I1203 09:49:17.334981 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nl2dd"] Dec 03 09:49:17 crc kubenswrapper[4576]: I1203 09:49:17.383081 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fde46f0-4823-4c59-bd4d-31b63c794d13-catalog-content\") pod \"certified-operators-nl2dd\" (UID: \"2fde46f0-4823-4c59-bd4d-31b63c794d13\") " pod="openshift-marketplace/certified-operators-nl2dd" Dec 03 09:49:17 crc kubenswrapper[4576]: I1203 09:49:17.383132 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fde46f0-4823-4c59-bd4d-31b63c794d13-utilities\") pod \"certified-operators-nl2dd\" (UID: \"2fde46f0-4823-4c59-bd4d-31b63c794d13\") " pod="openshift-marketplace/certified-operators-nl2dd" Dec 03 09:49:17 crc kubenswrapper[4576]: I1203 09:49:17.383160 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9pxk2\" (UniqueName: \"kubernetes.io/projected/2fde46f0-4823-4c59-bd4d-31b63c794d13-kube-api-access-9pxk2\") pod \"certified-operators-nl2dd\" (UID: \"2fde46f0-4823-4c59-bd4d-31b63c794d13\") " pod="openshift-marketplace/certified-operators-nl2dd" Dec 03 09:49:17 crc kubenswrapper[4576]: I1203 09:49:17.484634 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fde46f0-4823-4c59-bd4d-31b63c794d13-catalog-content\") pod \"certified-operators-nl2dd\" (UID: \"2fde46f0-4823-4c59-bd4d-31b63c794d13\") " pod="openshift-marketplace/certified-operators-nl2dd" Dec 03 09:49:17 crc kubenswrapper[4576]: I1203 09:49:17.484715 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fde46f0-4823-4c59-bd4d-31b63c794d13-utilities\") pod \"certified-operators-nl2dd\" (UID: \"2fde46f0-4823-4c59-bd4d-31b63c794d13\") " pod="openshift-marketplace/certified-operators-nl2dd" Dec 03 09:49:17 crc kubenswrapper[4576]: I1203 09:49:17.484745 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9pxk2\" (UniqueName: \"kubernetes.io/projected/2fde46f0-4823-4c59-bd4d-31b63c794d13-kube-api-access-9pxk2\") pod \"certified-operators-nl2dd\" (UID: \"2fde46f0-4823-4c59-bd4d-31b63c794d13\") " pod="openshift-marketplace/certified-operators-nl2dd" Dec 03 09:49:17 crc kubenswrapper[4576]: I1203 09:49:17.485439 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fde46f0-4823-4c59-bd4d-31b63c794d13-catalog-content\") pod \"certified-operators-nl2dd\" (UID: \"2fde46f0-4823-4c59-bd4d-31b63c794d13\") " pod="openshift-marketplace/certified-operators-nl2dd" Dec 03 09:49:17 crc kubenswrapper[4576]: I1203 09:49:17.485590 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fde46f0-4823-4c59-bd4d-31b63c794d13-utilities\") pod \"certified-operators-nl2dd\" (UID: \"2fde46f0-4823-4c59-bd4d-31b63c794d13\") " pod="openshift-marketplace/certified-operators-nl2dd" Dec 03 09:49:17 crc kubenswrapper[4576]: I1203 09:49:17.512510 4576 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-9pxk2\" (UniqueName: \"kubernetes.io/projected/2fde46f0-4823-4c59-bd4d-31b63c794d13-kube-api-access-9pxk2\") pod \"certified-operators-nl2dd\" (UID: \"2fde46f0-4823-4c59-bd4d-31b63c794d13\") " pod="openshift-marketplace/certified-operators-nl2dd" Dec 03 09:49:17 crc kubenswrapper[4576]: I1203 09:49:17.650726 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nl2dd" Dec 03 09:49:18 crc kubenswrapper[4576]: I1203 09:49:18.277846 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nl2dd"] Dec 03 09:49:18 crc kubenswrapper[4576]: I1203 09:49:18.422418 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nl2dd" event={"ID":"2fde46f0-4823-4c59-bd4d-31b63c794d13","Type":"ContainerStarted","Data":"85f7f9f7cf8c51c4515491f8e4c6f6743533c6013f76fd038e33ce25d960760c"} Dec 03 09:49:19 crc kubenswrapper[4576]: I1203 09:49:19.434164 4576 generic.go:334] "Generic (PLEG): container finished" podID="2fde46f0-4823-4c59-bd4d-31b63c794d13" containerID="948f9c958d11295cc9b3d743e79c4b630977098e3bb4054a0156320420bf4d30" exitCode=0 Dec 03 09:49:19 crc kubenswrapper[4576]: I1203 09:49:19.434215 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nl2dd" event={"ID":"2fde46f0-4823-4c59-bd4d-31b63c794d13","Type":"ContainerDied","Data":"948f9c958d11295cc9b3d743e79c4b630977098e3bb4054a0156320420bf4d30"} Dec 03 09:49:19 crc kubenswrapper[4576]: I1203 09:49:19.438110 4576 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 09:49:25 crc kubenswrapper[4576]: I1203 09:49:25.544477 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nl2dd" event={"ID":"2fde46f0-4823-4c59-bd4d-31b63c794d13","Type":"ContainerStarted","Data":"d75247089d7a7f0128e020fd2546a65c0f393fe9221a0c6f4dfbfd428e07abe2"} Dec 03 09:49:26 crc kubenswrapper[4576]: I1203 09:49:26.557082 4576 generic.go:334] "Generic (PLEG): container finished" podID="2fde46f0-4823-4c59-bd4d-31b63c794d13" containerID="d75247089d7a7f0128e020fd2546a65c0f393fe9221a0c6f4dfbfd428e07abe2" exitCode=0 Dec 03 09:49:26 crc kubenswrapper[4576]: I1203 09:49:26.557200 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nl2dd" event={"ID":"2fde46f0-4823-4c59-bd4d-31b63c794d13","Type":"ContainerDied","Data":"d75247089d7a7f0128e020fd2546a65c0f393fe9221a0c6f4dfbfd428e07abe2"} Dec 03 09:49:27 crc kubenswrapper[4576]: I1203 09:49:27.568602 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nl2dd" event={"ID":"2fde46f0-4823-4c59-bd4d-31b63c794d13","Type":"ContainerStarted","Data":"17389101530ed2f1ddb2b9baa7b602d5d4c2431d7fbb460d7d364ccf4baefb3a"} Dec 03 09:49:27 crc kubenswrapper[4576]: I1203 09:49:27.589433 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-nl2dd" podStartSLOduration=2.989766647 podStartE2EDuration="10.589411232s" podCreationTimestamp="2025-12-03 09:49:17 +0000 UTC" firstStartedPulling="2025-12-03 09:49:19.437368563 +0000 UTC m=+4166.823345547" lastFinishedPulling="2025-12-03 09:49:27.037013148 +0000 UTC m=+4174.422990132" observedRunningTime="2025-12-03 09:49:27.587270794 +0000 UTC m=+4174.973247778" watchObservedRunningTime="2025-12-03 
09:49:27.589411232 +0000 UTC m=+4174.975388206" Dec 03 09:49:27 crc kubenswrapper[4576]: I1203 09:49:27.651554 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-nl2dd" Dec 03 09:49:27 crc kubenswrapper[4576]: I1203 09:49:27.651604 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-nl2dd" Dec 03 09:49:28 crc kubenswrapper[4576]: I1203 09:49:28.713304 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-nl2dd" podUID="2fde46f0-4823-4c59-bd4d-31b63c794d13" containerName="registry-server" probeResult="failure" output=< Dec 03 09:49:28 crc kubenswrapper[4576]: timeout: failed to connect service ":50051" within 1s Dec 03 09:49:28 crc kubenswrapper[4576]: > Dec 03 09:49:37 crc kubenswrapper[4576]: I1203 09:49:37.733498 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-nl2dd" Dec 03 09:49:37 crc kubenswrapper[4576]: I1203 09:49:37.788095 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-nl2dd" Dec 03 09:49:37 crc kubenswrapper[4576]: I1203 09:49:37.870013 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nl2dd"] Dec 03 09:49:37 crc kubenswrapper[4576]: I1203 09:49:37.980368 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tt6tx"] Dec 03 09:49:37 crc kubenswrapper[4576]: I1203 09:49:37.980886 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-tt6tx" podUID="542eeeaf-465f-4cd8-a64d-32fde07cf5e3" containerName="registry-server" containerID="cri-o://fe92901014a72e8a6e7814d394d8bc9bbb754cfb9f73bdf541d049e1333e369b" gracePeriod=2 Dec 03 09:49:38 crc kubenswrapper[4576]: I1203 09:49:38.651143 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tt6tx" Dec 03 09:49:38 crc kubenswrapper[4576]: I1203 09:49:38.708607 4576 generic.go:334] "Generic (PLEG): container finished" podID="542eeeaf-465f-4cd8-a64d-32fde07cf5e3" containerID="fe92901014a72e8a6e7814d394d8bc9bbb754cfb9f73bdf541d049e1333e369b" exitCode=0 Dec 03 09:49:38 crc kubenswrapper[4576]: I1203 09:49:38.710053 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-tt6tx" Dec 03 09:49:38 crc kubenswrapper[4576]: I1203 09:49:38.710541 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tt6tx" event={"ID":"542eeeaf-465f-4cd8-a64d-32fde07cf5e3","Type":"ContainerDied","Data":"fe92901014a72e8a6e7814d394d8bc9bbb754cfb9f73bdf541d049e1333e369b"} Dec 03 09:49:38 crc kubenswrapper[4576]: I1203 09:49:38.710646 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tt6tx" event={"ID":"542eeeaf-465f-4cd8-a64d-32fde07cf5e3","Type":"ContainerDied","Data":"f99a1ea7119f5e1f14a49b74d8a633ab2c94bc8b2646d0e52e496ff473d689a3"} Dec 03 09:49:38 crc kubenswrapper[4576]: I1203 09:49:38.710749 4576 scope.go:117] "RemoveContainer" containerID="fe92901014a72e8a6e7814d394d8bc9bbb754cfb9f73bdf541d049e1333e369b" Dec 03 09:49:38 crc kubenswrapper[4576]: I1203 09:49:38.748785 4576 scope.go:117] "RemoveContainer" containerID="5414a29088f21b18e33980c8352be8c9a5a75160861bc35a3b934d7e490445b2" Dec 03 09:49:38 crc kubenswrapper[4576]: I1203 09:49:38.835049 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fv5z4\" (UniqueName: \"kubernetes.io/projected/542eeeaf-465f-4cd8-a64d-32fde07cf5e3-kube-api-access-fv5z4\") pod \"542eeeaf-465f-4cd8-a64d-32fde07cf5e3\" (UID: \"542eeeaf-465f-4cd8-a64d-32fde07cf5e3\") " Dec 03 09:49:38 crc kubenswrapper[4576]: I1203 09:49:38.835149 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/542eeeaf-465f-4cd8-a64d-32fde07cf5e3-catalog-content\") pod \"542eeeaf-465f-4cd8-a64d-32fde07cf5e3\" (UID: \"542eeeaf-465f-4cd8-a64d-32fde07cf5e3\") " Dec 03 09:49:38 crc kubenswrapper[4576]: I1203 09:49:38.835242 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/542eeeaf-465f-4cd8-a64d-32fde07cf5e3-utilities\") pod \"542eeeaf-465f-4cd8-a64d-32fde07cf5e3\" (UID: \"542eeeaf-465f-4cd8-a64d-32fde07cf5e3\") " Dec 03 09:49:38 crc kubenswrapper[4576]: I1203 09:49:38.857684 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/542eeeaf-465f-4cd8-a64d-32fde07cf5e3-utilities" (OuterVolumeSpecName: "utilities") pod "542eeeaf-465f-4cd8-a64d-32fde07cf5e3" (UID: "542eeeaf-465f-4cd8-a64d-32fde07cf5e3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:49:38 crc kubenswrapper[4576]: I1203 09:49:38.886235 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/542eeeaf-465f-4cd8-a64d-32fde07cf5e3-kube-api-access-fv5z4" (OuterVolumeSpecName: "kube-api-access-fv5z4") pod "542eeeaf-465f-4cd8-a64d-32fde07cf5e3" (UID: "542eeeaf-465f-4cd8-a64d-32fde07cf5e3"). InnerVolumeSpecName "kube-api-access-fv5z4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:49:38 crc kubenswrapper[4576]: I1203 09:49:38.933099 4576 scope.go:117] "RemoveContainer" containerID="a32b0e0ed1a60ff33f05600ad851b13e7bd51c804643060494d2d71929c20776" Dec 03 09:49:38 crc kubenswrapper[4576]: I1203 09:49:38.937957 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/542eeeaf-465f-4cd8-a64d-32fde07cf5e3-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 09:49:38 crc kubenswrapper[4576]: I1203 09:49:38.937978 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fv5z4\" (UniqueName: \"kubernetes.io/projected/542eeeaf-465f-4cd8-a64d-32fde07cf5e3-kube-api-access-fv5z4\") on node \"crc\" DevicePath \"\"" Dec 03 09:49:38 crc kubenswrapper[4576]: I1203 09:49:38.962244 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/542eeeaf-465f-4cd8-a64d-32fde07cf5e3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "542eeeaf-465f-4cd8-a64d-32fde07cf5e3" (UID: "542eeeaf-465f-4cd8-a64d-32fde07cf5e3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:49:39 crc kubenswrapper[4576]: I1203 09:49:39.009864 4576 scope.go:117] "RemoveContainer" containerID="fe92901014a72e8a6e7814d394d8bc9bbb754cfb9f73bdf541d049e1333e369b" Dec 03 09:49:39 crc kubenswrapper[4576]: E1203 09:49:39.015670 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe92901014a72e8a6e7814d394d8bc9bbb754cfb9f73bdf541d049e1333e369b\": container with ID starting with fe92901014a72e8a6e7814d394d8bc9bbb754cfb9f73bdf541d049e1333e369b not found: ID does not exist" containerID="fe92901014a72e8a6e7814d394d8bc9bbb754cfb9f73bdf541d049e1333e369b" Dec 03 09:49:39 crc kubenswrapper[4576]: I1203 09:49:39.015718 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe92901014a72e8a6e7814d394d8bc9bbb754cfb9f73bdf541d049e1333e369b"} err="failed to get container status \"fe92901014a72e8a6e7814d394d8bc9bbb754cfb9f73bdf541d049e1333e369b\": rpc error: code = NotFound desc = could not find container \"fe92901014a72e8a6e7814d394d8bc9bbb754cfb9f73bdf541d049e1333e369b\": container with ID starting with fe92901014a72e8a6e7814d394d8bc9bbb754cfb9f73bdf541d049e1333e369b not found: ID does not exist" Dec 03 09:49:39 crc kubenswrapper[4576]: I1203 09:49:39.015750 4576 scope.go:117] "RemoveContainer" containerID="5414a29088f21b18e33980c8352be8c9a5a75160861bc35a3b934d7e490445b2" Dec 03 09:49:39 crc kubenswrapper[4576]: E1203 09:49:39.018717 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5414a29088f21b18e33980c8352be8c9a5a75160861bc35a3b934d7e490445b2\": container with ID starting with 5414a29088f21b18e33980c8352be8c9a5a75160861bc35a3b934d7e490445b2 not found: ID does not exist" containerID="5414a29088f21b18e33980c8352be8c9a5a75160861bc35a3b934d7e490445b2" Dec 03 09:49:39 crc kubenswrapper[4576]: I1203 09:49:39.018754 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5414a29088f21b18e33980c8352be8c9a5a75160861bc35a3b934d7e490445b2"} err="failed to get container status \"5414a29088f21b18e33980c8352be8c9a5a75160861bc35a3b934d7e490445b2\": rpc error: code = NotFound desc = could not find container 
\"5414a29088f21b18e33980c8352be8c9a5a75160861bc35a3b934d7e490445b2\": container with ID starting with 5414a29088f21b18e33980c8352be8c9a5a75160861bc35a3b934d7e490445b2 not found: ID does not exist" Dec 03 09:49:39 crc kubenswrapper[4576]: I1203 09:49:39.018783 4576 scope.go:117] "RemoveContainer" containerID="a32b0e0ed1a60ff33f05600ad851b13e7bd51c804643060494d2d71929c20776" Dec 03 09:49:39 crc kubenswrapper[4576]: E1203 09:49:39.020480 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a32b0e0ed1a60ff33f05600ad851b13e7bd51c804643060494d2d71929c20776\": container with ID starting with a32b0e0ed1a60ff33f05600ad851b13e7bd51c804643060494d2d71929c20776 not found: ID does not exist" containerID="a32b0e0ed1a60ff33f05600ad851b13e7bd51c804643060494d2d71929c20776" Dec 03 09:49:39 crc kubenswrapper[4576]: I1203 09:49:39.020544 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a32b0e0ed1a60ff33f05600ad851b13e7bd51c804643060494d2d71929c20776"} err="failed to get container status \"a32b0e0ed1a60ff33f05600ad851b13e7bd51c804643060494d2d71929c20776\": rpc error: code = NotFound desc = could not find container \"a32b0e0ed1a60ff33f05600ad851b13e7bd51c804643060494d2d71929c20776\": container with ID starting with a32b0e0ed1a60ff33f05600ad851b13e7bd51c804643060494d2d71929c20776 not found: ID does not exist" Dec 03 09:49:39 crc kubenswrapper[4576]: I1203 09:49:39.042269 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/542eeeaf-465f-4cd8-a64d-32fde07cf5e3-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 09:49:39 crc kubenswrapper[4576]: I1203 09:49:39.064808 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tt6tx"] Dec 03 09:49:39 crc kubenswrapper[4576]: I1203 09:49:39.078324 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-tt6tx"] Dec 03 09:49:39 crc kubenswrapper[4576]: I1203 09:49:39.681221 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 09:49:39 crc kubenswrapper[4576]: I1203 09:49:39.681633 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 09:49:39 crc kubenswrapper[4576]: I1203 09:49:39.686630 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="542eeeaf-465f-4cd8-a64d-32fde07cf5e3" path="/var/lib/kubelet/pods/542eeeaf-465f-4cd8-a64d-32fde07cf5e3/volumes" Dec 03 09:50:09 crc kubenswrapper[4576]: I1203 09:50:09.681026 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 09:50:09 crc kubenswrapper[4576]: I1203 09:50:09.681585 4576 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 09:50:39 crc kubenswrapper[4576]: I1203 09:50:39.681062 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 09:50:39 crc kubenswrapper[4576]: I1203 09:50:39.681673 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 09:50:39 crc kubenswrapper[4576]: I1203 09:50:39.691872 4576 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" Dec 03 09:50:39 crc kubenswrapper[4576]: I1203 09:50:39.692677 4576 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d61e9cb3c6077d03c4ea98ab7e87c2446a37e6e7aa4300bd57c4f1d62ebaf7cc"} pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 09:50:39 crc kubenswrapper[4576]: I1203 09:50:39.692742 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" containerID="cri-o://d61e9cb3c6077d03c4ea98ab7e87c2446a37e6e7aa4300bd57c4f1d62ebaf7cc" gracePeriod=600 Dec 03 09:50:40 crc kubenswrapper[4576]: I1203 09:50:40.321064 4576 generic.go:334] "Generic (PLEG): container finished" podID="60b1bede-26e9-4b5d-b450-9866da685693" containerID="d61e9cb3c6077d03c4ea98ab7e87c2446a37e6e7aa4300bd57c4f1d62ebaf7cc" exitCode=0 Dec 03 09:50:40 crc kubenswrapper[4576]: I1203 09:50:40.321155 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" event={"ID":"60b1bede-26e9-4b5d-b450-9866da685693","Type":"ContainerDied","Data":"d61e9cb3c6077d03c4ea98ab7e87c2446a37e6e7aa4300bd57c4f1d62ebaf7cc"} Dec 03 09:50:40 crc kubenswrapper[4576]: I1203 09:50:40.321400 4576 scope.go:117] "RemoveContainer" containerID="203ce6e7360eeb498dd9ac293ee7cc7c5ecc0d5e931b63f02d758470199459d2" Dec 03 09:50:41 crc kubenswrapper[4576]: I1203 09:50:41.334713 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" event={"ID":"60b1bede-26e9-4b5d-b450-9866da685693","Type":"ContainerStarted","Data":"1cd38f3963db452577ddef8d05fbbfe3ed203e3c62f5c4d5773a282726874a79"} Dec 03 09:52:53 crc kubenswrapper[4576]: I1203 09:52:53.546651 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-d6xmw"] Dec 03 09:52:53 crc kubenswrapper[4576]: E1203 09:52:53.547709 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="542eeeaf-465f-4cd8-a64d-32fde07cf5e3" containerName="extract-utilities" Dec 03 09:52:53 
crc kubenswrapper[4576]: I1203 09:52:53.547729 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="542eeeaf-465f-4cd8-a64d-32fde07cf5e3" containerName="extract-utilities" Dec 03 09:52:53 crc kubenswrapper[4576]: E1203 09:52:53.547740 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="542eeeaf-465f-4cd8-a64d-32fde07cf5e3" containerName="extract-content" Dec 03 09:52:53 crc kubenswrapper[4576]: I1203 09:52:53.547748 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="542eeeaf-465f-4cd8-a64d-32fde07cf5e3" containerName="extract-content" Dec 03 09:52:53 crc kubenswrapper[4576]: E1203 09:52:53.547782 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="542eeeaf-465f-4cd8-a64d-32fde07cf5e3" containerName="registry-server" Dec 03 09:52:53 crc kubenswrapper[4576]: I1203 09:52:53.547790 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="542eeeaf-465f-4cd8-a64d-32fde07cf5e3" containerName="registry-server" Dec 03 09:52:53 crc kubenswrapper[4576]: I1203 09:52:53.548017 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="542eeeaf-465f-4cd8-a64d-32fde07cf5e3" containerName="registry-server" Dec 03 09:52:53 crc kubenswrapper[4576]: I1203 09:52:53.549768 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-d6xmw" Dec 03 09:52:53 crc kubenswrapper[4576]: I1203 09:52:53.570668 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-d6xmw"] Dec 03 09:52:53 crc kubenswrapper[4576]: I1203 09:52:53.639462 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k7kpg\" (UniqueName: \"kubernetes.io/projected/879b2044-307c-43a5-a641-b81827d1fcdf-kube-api-access-k7kpg\") pod \"redhat-operators-d6xmw\" (UID: \"879b2044-307c-43a5-a641-b81827d1fcdf\") " pod="openshift-marketplace/redhat-operators-d6xmw" Dec 03 09:52:53 crc kubenswrapper[4576]: I1203 09:52:53.639757 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/879b2044-307c-43a5-a641-b81827d1fcdf-utilities\") pod \"redhat-operators-d6xmw\" (UID: \"879b2044-307c-43a5-a641-b81827d1fcdf\") " pod="openshift-marketplace/redhat-operators-d6xmw" Dec 03 09:52:53 crc kubenswrapper[4576]: I1203 09:52:53.640022 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/879b2044-307c-43a5-a641-b81827d1fcdf-catalog-content\") pod \"redhat-operators-d6xmw\" (UID: \"879b2044-307c-43a5-a641-b81827d1fcdf\") " pod="openshift-marketplace/redhat-operators-d6xmw" Dec 03 09:52:53 crc kubenswrapper[4576]: I1203 09:52:53.742245 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/879b2044-307c-43a5-a641-b81827d1fcdf-catalog-content\") pod \"redhat-operators-d6xmw\" (UID: \"879b2044-307c-43a5-a641-b81827d1fcdf\") " pod="openshift-marketplace/redhat-operators-d6xmw" Dec 03 09:52:53 crc kubenswrapper[4576]: I1203 09:52:53.742415 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7kpg\" (UniqueName: \"kubernetes.io/projected/879b2044-307c-43a5-a641-b81827d1fcdf-kube-api-access-k7kpg\") pod \"redhat-operators-d6xmw\" (UID: \"879b2044-307c-43a5-a641-b81827d1fcdf\") " 
pod="openshift-marketplace/redhat-operators-d6xmw" Dec 03 09:52:53 crc kubenswrapper[4576]: I1203 09:52:53.742474 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/879b2044-307c-43a5-a641-b81827d1fcdf-utilities\") pod \"redhat-operators-d6xmw\" (UID: \"879b2044-307c-43a5-a641-b81827d1fcdf\") " pod="openshift-marketplace/redhat-operators-d6xmw" Dec 03 09:52:53 crc kubenswrapper[4576]: I1203 09:52:53.742882 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/879b2044-307c-43a5-a641-b81827d1fcdf-catalog-content\") pod \"redhat-operators-d6xmw\" (UID: \"879b2044-307c-43a5-a641-b81827d1fcdf\") " pod="openshift-marketplace/redhat-operators-d6xmw" Dec 03 09:52:53 crc kubenswrapper[4576]: I1203 09:52:53.742933 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/879b2044-307c-43a5-a641-b81827d1fcdf-utilities\") pod \"redhat-operators-d6xmw\" (UID: \"879b2044-307c-43a5-a641-b81827d1fcdf\") " pod="openshift-marketplace/redhat-operators-d6xmw" Dec 03 09:52:53 crc kubenswrapper[4576]: I1203 09:52:53.775128 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k7kpg\" (UniqueName: \"kubernetes.io/projected/879b2044-307c-43a5-a641-b81827d1fcdf-kube-api-access-k7kpg\") pod \"redhat-operators-d6xmw\" (UID: \"879b2044-307c-43a5-a641-b81827d1fcdf\") " pod="openshift-marketplace/redhat-operators-d6xmw" Dec 03 09:52:53 crc kubenswrapper[4576]: I1203 09:52:53.869477 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-d6xmw" Dec 03 09:52:54 crc kubenswrapper[4576]: I1203 09:52:54.417996 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-d6xmw"] Dec 03 09:52:54 crc kubenswrapper[4576]: I1203 09:52:54.890221 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d6xmw" event={"ID":"879b2044-307c-43a5-a641-b81827d1fcdf","Type":"ContainerStarted","Data":"cc48b44696ca3e0d8e83e31362a598febc127fa5c2f2a0af0cab7885a969b5c5"} Dec 03 09:52:55 crc kubenswrapper[4576]: I1203 09:52:55.904397 4576 generic.go:334] "Generic (PLEG): container finished" podID="879b2044-307c-43a5-a641-b81827d1fcdf" containerID="7ab391822e66596648a4b349949ebc6351e92b6c6087d76e00b8e2673c048973" exitCode=0 Dec 03 09:52:55 crc kubenswrapper[4576]: I1203 09:52:55.904572 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d6xmw" event={"ID":"879b2044-307c-43a5-a641-b81827d1fcdf","Type":"ContainerDied","Data":"7ab391822e66596648a4b349949ebc6351e92b6c6087d76e00b8e2673c048973"} Dec 03 09:52:57 crc kubenswrapper[4576]: I1203 09:52:57.928106 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d6xmw" event={"ID":"879b2044-307c-43a5-a641-b81827d1fcdf","Type":"ContainerStarted","Data":"9973d9f3fb8dc99cc2639afbf39975a07ed4a2bb1beafca9e3b5516bb9663b15"} Dec 03 09:52:59 crc kubenswrapper[4576]: I1203 09:52:59.949479 4576 generic.go:334] "Generic (PLEG): container finished" podID="879b2044-307c-43a5-a641-b81827d1fcdf" containerID="9973d9f3fb8dc99cc2639afbf39975a07ed4a2bb1beafca9e3b5516bb9663b15" exitCode=0 Dec 03 09:52:59 crc kubenswrapper[4576]: I1203 09:52:59.949634 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-d6xmw" event={"ID":"879b2044-307c-43a5-a641-b81827d1fcdf","Type":"ContainerDied","Data":"9973d9f3fb8dc99cc2639afbf39975a07ed4a2bb1beafca9e3b5516bb9663b15"} Dec 03 09:53:00 crc kubenswrapper[4576]: I1203 09:53:00.961683 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d6xmw" event={"ID":"879b2044-307c-43a5-a641-b81827d1fcdf","Type":"ContainerStarted","Data":"8529343d36a9976e3990f202bb8edad2edbe58268a11590efc86e4549cb4d71a"} Dec 03 09:53:01 crc kubenswrapper[4576]: I1203 09:53:01.000826 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-d6xmw" podStartSLOduration=3.529921491 podStartE2EDuration="8.000805511s" podCreationTimestamp="2025-12-03 09:52:53 +0000 UTC" firstStartedPulling="2025-12-03 09:52:55.909307435 +0000 UTC m=+4383.295284439" lastFinishedPulling="2025-12-03 09:53:00.380191465 +0000 UTC m=+4387.766168459" observedRunningTime="2025-12-03 09:53:00.988738536 +0000 UTC m=+4388.374715520" watchObservedRunningTime="2025-12-03 09:53:01.000805511 +0000 UTC m=+4388.386782495" Dec 03 09:53:03 crc kubenswrapper[4576]: I1203 09:53:03.869811 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-d6xmw" Dec 03 09:53:03 crc kubenswrapper[4576]: I1203 09:53:03.870393 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-d6xmw" Dec 03 09:53:04 crc kubenswrapper[4576]: I1203 09:53:04.918755 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-d6xmw" podUID="879b2044-307c-43a5-a641-b81827d1fcdf" containerName="registry-server" probeResult="failure" output=< Dec 03 09:53:04 crc kubenswrapper[4576]: timeout: failed to connect service ":50051" within 1s Dec 03 09:53:04 crc kubenswrapper[4576]: > Dec 03 09:53:09 crc kubenswrapper[4576]: I1203 09:53:09.681271 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 09:53:09 crc kubenswrapper[4576]: I1203 09:53:09.681866 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 09:53:13 crc kubenswrapper[4576]: I1203 09:53:13.937709 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-d6xmw" Dec 03 09:53:14 crc kubenswrapper[4576]: I1203 09:53:14.011389 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-d6xmw" Dec 03 09:53:14 crc kubenswrapper[4576]: I1203 09:53:14.175138 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-d6xmw"] Dec 03 09:53:15 crc kubenswrapper[4576]: I1203 09:53:15.135514 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-d6xmw" podUID="879b2044-307c-43a5-a641-b81827d1fcdf" containerName="registry-server" 
containerID="cri-o://8529343d36a9976e3990f202bb8edad2edbe58268a11590efc86e4549cb4d71a" gracePeriod=2 Dec 03 09:53:15 crc kubenswrapper[4576]: I1203 09:53:15.744068 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-d6xmw" Dec 03 09:53:15 crc kubenswrapper[4576]: I1203 09:53:15.842074 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/879b2044-307c-43a5-a641-b81827d1fcdf-catalog-content\") pod \"879b2044-307c-43a5-a641-b81827d1fcdf\" (UID: \"879b2044-307c-43a5-a641-b81827d1fcdf\") " Dec 03 09:53:15 crc kubenswrapper[4576]: I1203 09:53:15.842159 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k7kpg\" (UniqueName: \"kubernetes.io/projected/879b2044-307c-43a5-a641-b81827d1fcdf-kube-api-access-k7kpg\") pod \"879b2044-307c-43a5-a641-b81827d1fcdf\" (UID: \"879b2044-307c-43a5-a641-b81827d1fcdf\") " Dec 03 09:53:15 crc kubenswrapper[4576]: I1203 09:53:15.842204 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/879b2044-307c-43a5-a641-b81827d1fcdf-utilities\") pod \"879b2044-307c-43a5-a641-b81827d1fcdf\" (UID: \"879b2044-307c-43a5-a641-b81827d1fcdf\") " Dec 03 09:53:15 crc kubenswrapper[4576]: I1203 09:53:15.843913 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/879b2044-307c-43a5-a641-b81827d1fcdf-utilities" (OuterVolumeSpecName: "utilities") pod "879b2044-307c-43a5-a641-b81827d1fcdf" (UID: "879b2044-307c-43a5-a641-b81827d1fcdf"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:53:15 crc kubenswrapper[4576]: I1203 09:53:15.868123 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/879b2044-307c-43a5-a641-b81827d1fcdf-kube-api-access-k7kpg" (OuterVolumeSpecName: "kube-api-access-k7kpg") pod "879b2044-307c-43a5-a641-b81827d1fcdf" (UID: "879b2044-307c-43a5-a641-b81827d1fcdf"). InnerVolumeSpecName "kube-api-access-k7kpg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:53:15 crc kubenswrapper[4576]: I1203 09:53:15.944177 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k7kpg\" (UniqueName: \"kubernetes.io/projected/879b2044-307c-43a5-a641-b81827d1fcdf-kube-api-access-k7kpg\") on node \"crc\" DevicePath \"\"" Dec 03 09:53:15 crc kubenswrapper[4576]: I1203 09:53:15.944219 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/879b2044-307c-43a5-a641-b81827d1fcdf-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 09:53:15 crc kubenswrapper[4576]: I1203 09:53:15.976268 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/879b2044-307c-43a5-a641-b81827d1fcdf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "879b2044-307c-43a5-a641-b81827d1fcdf" (UID: "879b2044-307c-43a5-a641-b81827d1fcdf"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:53:16 crc kubenswrapper[4576]: I1203 09:53:16.046224 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/879b2044-307c-43a5-a641-b81827d1fcdf-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 09:53:16 crc kubenswrapper[4576]: I1203 09:53:16.200658 4576 generic.go:334] "Generic (PLEG): container finished" podID="879b2044-307c-43a5-a641-b81827d1fcdf" containerID="8529343d36a9976e3990f202bb8edad2edbe58268a11590efc86e4549cb4d71a" exitCode=0 Dec 03 09:53:16 crc kubenswrapper[4576]: I1203 09:53:16.200709 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d6xmw" event={"ID":"879b2044-307c-43a5-a641-b81827d1fcdf","Type":"ContainerDied","Data":"8529343d36a9976e3990f202bb8edad2edbe58268a11590efc86e4549cb4d71a"} Dec 03 09:53:16 crc kubenswrapper[4576]: I1203 09:53:16.200741 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d6xmw" event={"ID":"879b2044-307c-43a5-a641-b81827d1fcdf","Type":"ContainerDied","Data":"cc48b44696ca3e0d8e83e31362a598febc127fa5c2f2a0af0cab7885a969b5c5"} Dec 03 09:53:16 crc kubenswrapper[4576]: I1203 09:53:16.200764 4576 scope.go:117] "RemoveContainer" containerID="8529343d36a9976e3990f202bb8edad2edbe58268a11590efc86e4549cb4d71a" Dec 03 09:53:16 crc kubenswrapper[4576]: I1203 09:53:16.200935 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-d6xmw" Dec 03 09:53:16 crc kubenswrapper[4576]: I1203 09:53:16.240060 4576 scope.go:117] "RemoveContainer" containerID="9973d9f3fb8dc99cc2639afbf39975a07ed4a2bb1beafca9e3b5516bb9663b15" Dec 03 09:53:16 crc kubenswrapper[4576]: I1203 09:53:16.242614 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-d6xmw"] Dec 03 09:53:16 crc kubenswrapper[4576]: I1203 09:53:16.254822 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-d6xmw"] Dec 03 09:53:16 crc kubenswrapper[4576]: I1203 09:53:16.267154 4576 scope.go:117] "RemoveContainer" containerID="7ab391822e66596648a4b349949ebc6351e92b6c6087d76e00b8e2673c048973" Dec 03 09:53:16 crc kubenswrapper[4576]: I1203 09:53:16.318511 4576 scope.go:117] "RemoveContainer" containerID="8529343d36a9976e3990f202bb8edad2edbe58268a11590efc86e4549cb4d71a" Dec 03 09:53:16 crc kubenswrapper[4576]: E1203 09:53:16.318923 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8529343d36a9976e3990f202bb8edad2edbe58268a11590efc86e4549cb4d71a\": container with ID starting with 8529343d36a9976e3990f202bb8edad2edbe58268a11590efc86e4549cb4d71a not found: ID does not exist" containerID="8529343d36a9976e3990f202bb8edad2edbe58268a11590efc86e4549cb4d71a" Dec 03 09:53:16 crc kubenswrapper[4576]: I1203 09:53:16.318950 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8529343d36a9976e3990f202bb8edad2edbe58268a11590efc86e4549cb4d71a"} err="failed to get container status \"8529343d36a9976e3990f202bb8edad2edbe58268a11590efc86e4549cb4d71a\": rpc error: code = NotFound desc = could not find container \"8529343d36a9976e3990f202bb8edad2edbe58268a11590efc86e4549cb4d71a\": container with ID starting with 8529343d36a9976e3990f202bb8edad2edbe58268a11590efc86e4549cb4d71a not found: ID does not exist" Dec 03 09:53:16 crc 
kubenswrapper[4576]: I1203 09:53:16.318970 4576 scope.go:117] "RemoveContainer" containerID="9973d9f3fb8dc99cc2639afbf39975a07ed4a2bb1beafca9e3b5516bb9663b15" Dec 03 09:53:16 crc kubenswrapper[4576]: E1203 09:53:16.320076 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9973d9f3fb8dc99cc2639afbf39975a07ed4a2bb1beafca9e3b5516bb9663b15\": container with ID starting with 9973d9f3fb8dc99cc2639afbf39975a07ed4a2bb1beafca9e3b5516bb9663b15 not found: ID does not exist" containerID="9973d9f3fb8dc99cc2639afbf39975a07ed4a2bb1beafca9e3b5516bb9663b15" Dec 03 09:53:16 crc kubenswrapper[4576]: I1203 09:53:16.320101 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9973d9f3fb8dc99cc2639afbf39975a07ed4a2bb1beafca9e3b5516bb9663b15"} err="failed to get container status \"9973d9f3fb8dc99cc2639afbf39975a07ed4a2bb1beafca9e3b5516bb9663b15\": rpc error: code = NotFound desc = could not find container \"9973d9f3fb8dc99cc2639afbf39975a07ed4a2bb1beafca9e3b5516bb9663b15\": container with ID starting with 9973d9f3fb8dc99cc2639afbf39975a07ed4a2bb1beafca9e3b5516bb9663b15 not found: ID does not exist" Dec 03 09:53:16 crc kubenswrapper[4576]: I1203 09:53:16.320117 4576 scope.go:117] "RemoveContainer" containerID="7ab391822e66596648a4b349949ebc6351e92b6c6087d76e00b8e2673c048973" Dec 03 09:53:16 crc kubenswrapper[4576]: E1203 09:53:16.321881 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ab391822e66596648a4b349949ebc6351e92b6c6087d76e00b8e2673c048973\": container with ID starting with 7ab391822e66596648a4b349949ebc6351e92b6c6087d76e00b8e2673c048973 not found: ID does not exist" containerID="7ab391822e66596648a4b349949ebc6351e92b6c6087d76e00b8e2673c048973" Dec 03 09:53:16 crc kubenswrapper[4576]: I1203 09:53:16.321916 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ab391822e66596648a4b349949ebc6351e92b6c6087d76e00b8e2673c048973"} err="failed to get container status \"7ab391822e66596648a4b349949ebc6351e92b6c6087d76e00b8e2673c048973\": rpc error: code = NotFound desc = could not find container \"7ab391822e66596648a4b349949ebc6351e92b6c6087d76e00b8e2673c048973\": container with ID starting with 7ab391822e66596648a4b349949ebc6351e92b6c6087d76e00b8e2673c048973 not found: ID does not exist" Dec 03 09:53:17 crc kubenswrapper[4576]: I1203 09:53:17.690728 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="879b2044-307c-43a5-a641-b81827d1fcdf" path="/var/lib/kubelet/pods/879b2044-307c-43a5-a641-b81827d1fcdf/volumes" Dec 03 09:53:39 crc kubenswrapper[4576]: I1203 09:53:39.680338 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 09:53:39 crc kubenswrapper[4576]: I1203 09:53:39.680856 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 09:54:09 crc kubenswrapper[4576]: I1203 09:54:09.681312 4576 patch_prober.go:28] interesting 
pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 09:54:09 crc kubenswrapper[4576]: I1203 09:54:09.681881 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 09:54:09 crc kubenswrapper[4576]: I1203 09:54:09.687706 4576 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" Dec 03 09:54:09 crc kubenswrapper[4576]: I1203 09:54:09.688565 4576 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1cd38f3963db452577ddef8d05fbbfe3ed203e3c62f5c4d5773a282726874a79"} pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 09:54:09 crc kubenswrapper[4576]: I1203 09:54:09.688647 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" containerID="cri-o://1cd38f3963db452577ddef8d05fbbfe3ed203e3c62f5c4d5773a282726874a79" gracePeriod=600 Dec 03 09:54:09 crc kubenswrapper[4576]: E1203 09:54:09.837704 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:54:10 crc kubenswrapper[4576]: I1203 09:54:10.807605 4576 generic.go:334] "Generic (PLEG): container finished" podID="60b1bede-26e9-4b5d-b450-9866da685693" containerID="1cd38f3963db452577ddef8d05fbbfe3ed203e3c62f5c4d5773a282726874a79" exitCode=0 Dec 03 09:54:10 crc kubenswrapper[4576]: I1203 09:54:10.807648 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" event={"ID":"60b1bede-26e9-4b5d-b450-9866da685693","Type":"ContainerDied","Data":"1cd38f3963db452577ddef8d05fbbfe3ed203e3c62f5c4d5773a282726874a79"} Dec 03 09:54:10 crc kubenswrapper[4576]: I1203 09:54:10.807679 4576 scope.go:117] "RemoveContainer" containerID="d61e9cb3c6077d03c4ea98ab7e87c2446a37e6e7aa4300bd57c4f1d62ebaf7cc" Dec 03 09:54:10 crc kubenswrapper[4576]: I1203 09:54:10.808289 4576 scope.go:117] "RemoveContainer" containerID="1cd38f3963db452577ddef8d05fbbfe3ed203e3c62f5c4d5773a282726874a79" Dec 03 09:54:10 crc kubenswrapper[4576]: E1203 09:54:10.808614 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:54:24 crc kubenswrapper[4576]: I1203 09:54:24.677633 4576 scope.go:117] "RemoveContainer" containerID="1cd38f3963db452577ddef8d05fbbfe3ed203e3c62f5c4d5773a282726874a79" Dec 03 09:54:24 crc kubenswrapper[4576]: E1203 09:54:24.678643 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:54:35 crc kubenswrapper[4576]: I1203 09:54:35.677449 4576 scope.go:117] "RemoveContainer" containerID="1cd38f3963db452577ddef8d05fbbfe3ed203e3c62f5c4d5773a282726874a79" Dec 03 09:54:35 crc kubenswrapper[4576]: E1203 09:54:35.678269 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:54:48 crc kubenswrapper[4576]: I1203 09:54:48.677265 4576 scope.go:117] "RemoveContainer" containerID="1cd38f3963db452577ddef8d05fbbfe3ed203e3c62f5c4d5773a282726874a79" Dec 03 09:54:48 crc kubenswrapper[4576]: E1203 09:54:48.678239 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:55:02 crc kubenswrapper[4576]: I1203 09:55:02.679233 4576 scope.go:117] "RemoveContainer" containerID="1cd38f3963db452577ddef8d05fbbfe3ed203e3c62f5c4d5773a282726874a79" Dec 03 09:55:02 crc kubenswrapper[4576]: E1203 09:55:02.680342 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:55:15 crc kubenswrapper[4576]: I1203 09:55:15.678104 4576 scope.go:117] "RemoveContainer" containerID="1cd38f3963db452577ddef8d05fbbfe3ed203e3c62f5c4d5773a282726874a79" Dec 03 09:55:15 crc kubenswrapper[4576]: E1203 09:55:15.680099 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:55:27 crc kubenswrapper[4576]: I1203 09:55:27.678684 4576 
scope.go:117] "RemoveContainer" containerID="1cd38f3963db452577ddef8d05fbbfe3ed203e3c62f5c4d5773a282726874a79" Dec 03 09:55:27 crc kubenswrapper[4576]: E1203 09:55:27.679538 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:55:40 crc kubenswrapper[4576]: I1203 09:55:40.677943 4576 scope.go:117] "RemoveContainer" containerID="1cd38f3963db452577ddef8d05fbbfe3ed203e3c62f5c4d5773a282726874a79" Dec 03 09:55:40 crc kubenswrapper[4576]: E1203 09:55:40.678731 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:55:51 crc kubenswrapper[4576]: I1203 09:55:51.677824 4576 scope.go:117] "RemoveContainer" containerID="1cd38f3963db452577ddef8d05fbbfe3ed203e3c62f5c4d5773a282726874a79" Dec 03 09:55:51 crc kubenswrapper[4576]: E1203 09:55:51.678766 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:56:02 crc kubenswrapper[4576]: I1203 09:56:02.677261 4576 scope.go:117] "RemoveContainer" containerID="1cd38f3963db452577ddef8d05fbbfe3ed203e3c62f5c4d5773a282726874a79" Dec 03 09:56:02 crc kubenswrapper[4576]: E1203 09:56:02.678146 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:56:16 crc kubenswrapper[4576]: I1203 09:56:16.676980 4576 scope.go:117] "RemoveContainer" containerID="1cd38f3963db452577ddef8d05fbbfe3ed203e3c62f5c4d5773a282726874a79" Dec 03 09:56:16 crc kubenswrapper[4576]: E1203 09:56:16.677759 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:56:29 crc kubenswrapper[4576]: I1203 09:56:29.677022 4576 scope.go:117] "RemoveContainer" containerID="1cd38f3963db452577ddef8d05fbbfe3ed203e3c62f5c4d5773a282726874a79" Dec 03 09:56:29 crc kubenswrapper[4576]: E1203 09:56:29.677626 4576 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:56:40 crc kubenswrapper[4576]: I1203 09:56:40.677373 4576 scope.go:117] "RemoveContainer" containerID="1cd38f3963db452577ddef8d05fbbfe3ed203e3c62f5c4d5773a282726874a79" Dec 03 09:56:40 crc kubenswrapper[4576]: E1203 09:56:40.679648 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:56:55 crc kubenswrapper[4576]: I1203 09:56:55.677631 4576 scope.go:117] "RemoveContainer" containerID="1cd38f3963db452577ddef8d05fbbfe3ed203e3c62f5c4d5773a282726874a79" Dec 03 09:56:55 crc kubenswrapper[4576]: E1203 09:56:55.678568 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:57:07 crc kubenswrapper[4576]: I1203 09:57:07.678418 4576 scope.go:117] "RemoveContainer" containerID="1cd38f3963db452577ddef8d05fbbfe3ed203e3c62f5c4d5773a282726874a79" Dec 03 09:57:07 crc kubenswrapper[4576]: E1203 09:57:07.679497 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:57:19 crc kubenswrapper[4576]: I1203 09:57:19.710178 4576 scope.go:117] "RemoveContainer" containerID="1cd38f3963db452577ddef8d05fbbfe3ed203e3c62f5c4d5773a282726874a79" Dec 03 09:57:19 crc kubenswrapper[4576]: E1203 09:57:19.711208 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:57:30 crc kubenswrapper[4576]: I1203 09:57:30.677910 4576 scope.go:117] "RemoveContainer" containerID="1cd38f3963db452577ddef8d05fbbfe3ed203e3c62f5c4d5773a282726874a79" Dec 03 09:57:30 crc kubenswrapper[4576]: E1203 09:57:30.678879 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:57:31 crc kubenswrapper[4576]: I1203 09:57:31.452910 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-8x9ck"] Dec 03 09:57:31 crc kubenswrapper[4576]: E1203 09:57:31.453353 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="879b2044-307c-43a5-a641-b81827d1fcdf" containerName="extract-content" Dec 03 09:57:31 crc kubenswrapper[4576]: I1203 09:57:31.453372 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="879b2044-307c-43a5-a641-b81827d1fcdf" containerName="extract-content" Dec 03 09:57:31 crc kubenswrapper[4576]: E1203 09:57:31.453391 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="879b2044-307c-43a5-a641-b81827d1fcdf" containerName="extract-utilities" Dec 03 09:57:31 crc kubenswrapper[4576]: I1203 09:57:31.453398 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="879b2044-307c-43a5-a641-b81827d1fcdf" containerName="extract-utilities" Dec 03 09:57:31 crc kubenswrapper[4576]: E1203 09:57:31.453416 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="879b2044-307c-43a5-a641-b81827d1fcdf" containerName="registry-server" Dec 03 09:57:31 crc kubenswrapper[4576]: I1203 09:57:31.453422 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="879b2044-307c-43a5-a641-b81827d1fcdf" containerName="registry-server" Dec 03 09:57:31 crc kubenswrapper[4576]: I1203 09:57:31.453652 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="879b2044-307c-43a5-a641-b81827d1fcdf" containerName="registry-server" Dec 03 09:57:31 crc kubenswrapper[4576]: I1203 09:57:31.455021 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8x9ck" Dec 03 09:57:31 crc kubenswrapper[4576]: I1203 09:57:31.480086 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8x9ck"] Dec 03 09:57:31 crc kubenswrapper[4576]: I1203 09:57:31.640657 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p8jr7\" (UniqueName: \"kubernetes.io/projected/e338de09-eabc-45ae-9595-eb154ac4b70e-kube-api-access-p8jr7\") pod \"redhat-marketplace-8x9ck\" (UID: \"e338de09-eabc-45ae-9595-eb154ac4b70e\") " pod="openshift-marketplace/redhat-marketplace-8x9ck" Dec 03 09:57:31 crc kubenswrapper[4576]: I1203 09:57:31.641250 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e338de09-eabc-45ae-9595-eb154ac4b70e-catalog-content\") pod \"redhat-marketplace-8x9ck\" (UID: \"e338de09-eabc-45ae-9595-eb154ac4b70e\") " pod="openshift-marketplace/redhat-marketplace-8x9ck" Dec 03 09:57:31 crc kubenswrapper[4576]: I1203 09:57:31.641385 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e338de09-eabc-45ae-9595-eb154ac4b70e-utilities\") pod \"redhat-marketplace-8x9ck\" (UID: \"e338de09-eabc-45ae-9595-eb154ac4b70e\") " pod="openshift-marketplace/redhat-marketplace-8x9ck" Dec 03 09:57:31 crc kubenswrapper[4576]: I1203 09:57:31.743602 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e338de09-eabc-45ae-9595-eb154ac4b70e-utilities\") pod \"redhat-marketplace-8x9ck\" (UID: \"e338de09-eabc-45ae-9595-eb154ac4b70e\") " pod="openshift-marketplace/redhat-marketplace-8x9ck" Dec 03 09:57:31 crc kubenswrapper[4576]: I1203 09:57:31.744182 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e338de09-eabc-45ae-9595-eb154ac4b70e-utilities\") pod \"redhat-marketplace-8x9ck\" (UID: \"e338de09-eabc-45ae-9595-eb154ac4b70e\") " pod="openshift-marketplace/redhat-marketplace-8x9ck" Dec 03 09:57:31 crc kubenswrapper[4576]: I1203 09:57:31.744334 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p8jr7\" (UniqueName: \"kubernetes.io/projected/e338de09-eabc-45ae-9595-eb154ac4b70e-kube-api-access-p8jr7\") pod \"redhat-marketplace-8x9ck\" (UID: \"e338de09-eabc-45ae-9595-eb154ac4b70e\") " pod="openshift-marketplace/redhat-marketplace-8x9ck" Dec 03 09:57:31 crc kubenswrapper[4576]: I1203 09:57:31.744459 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e338de09-eabc-45ae-9595-eb154ac4b70e-catalog-content\") pod \"redhat-marketplace-8x9ck\" (UID: \"e338de09-eabc-45ae-9595-eb154ac4b70e\") " pod="openshift-marketplace/redhat-marketplace-8x9ck" Dec 03 09:57:31 crc kubenswrapper[4576]: I1203 09:57:31.744944 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e338de09-eabc-45ae-9595-eb154ac4b70e-catalog-content\") pod \"redhat-marketplace-8x9ck\" (UID: \"e338de09-eabc-45ae-9595-eb154ac4b70e\") " pod="openshift-marketplace/redhat-marketplace-8x9ck" Dec 03 09:57:31 crc kubenswrapper[4576]: I1203 09:57:31.794907 4576 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-p8jr7\" (UniqueName: \"kubernetes.io/projected/e338de09-eabc-45ae-9595-eb154ac4b70e-kube-api-access-p8jr7\") pod \"redhat-marketplace-8x9ck\" (UID: \"e338de09-eabc-45ae-9595-eb154ac4b70e\") " pod="openshift-marketplace/redhat-marketplace-8x9ck" Dec 03 09:57:32 crc kubenswrapper[4576]: I1203 09:57:32.080070 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8x9ck" Dec 03 09:57:32 crc kubenswrapper[4576]: I1203 09:57:32.631232 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8x9ck"] Dec 03 09:57:32 crc kubenswrapper[4576]: I1203 09:57:32.681188 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8x9ck" event={"ID":"e338de09-eabc-45ae-9595-eb154ac4b70e","Type":"ContainerStarted","Data":"b14c75f9fb26a3ec02c5ab986226bcb037813e01fc48b5d41c5474f5a292622e"} Dec 03 09:57:33 crc kubenswrapper[4576]: I1203 09:57:33.720994 4576 generic.go:334] "Generic (PLEG): container finished" podID="e338de09-eabc-45ae-9595-eb154ac4b70e" containerID="dd8d0f702537ebe084acb47c69da251e87a3ef4776c9090151994479cd1ae79c" exitCode=0 Dec 03 09:57:33 crc kubenswrapper[4576]: I1203 09:57:33.721171 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8x9ck" event={"ID":"e338de09-eabc-45ae-9595-eb154ac4b70e","Type":"ContainerDied","Data":"dd8d0f702537ebe084acb47c69da251e87a3ef4776c9090151994479cd1ae79c"} Dec 03 09:57:33 crc kubenswrapper[4576]: I1203 09:57:33.724037 4576 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 09:57:34 crc kubenswrapper[4576]: I1203 09:57:34.738641 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8x9ck" event={"ID":"e338de09-eabc-45ae-9595-eb154ac4b70e","Type":"ContainerStarted","Data":"55ac919a9be4505a36a94d1a64dffa8e240e3fc331b5ba98e892e07188de7620"} Dec 03 09:57:35 crc kubenswrapper[4576]: I1203 09:57:35.749612 4576 generic.go:334] "Generic (PLEG): container finished" podID="e338de09-eabc-45ae-9595-eb154ac4b70e" containerID="55ac919a9be4505a36a94d1a64dffa8e240e3fc331b5ba98e892e07188de7620" exitCode=0 Dec 03 09:57:35 crc kubenswrapper[4576]: I1203 09:57:35.749743 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8x9ck" event={"ID":"e338de09-eabc-45ae-9595-eb154ac4b70e","Type":"ContainerDied","Data":"55ac919a9be4505a36a94d1a64dffa8e240e3fc331b5ba98e892e07188de7620"} Dec 03 09:57:36 crc kubenswrapper[4576]: I1203 09:57:36.759988 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8x9ck" event={"ID":"e338de09-eabc-45ae-9595-eb154ac4b70e","Type":"ContainerStarted","Data":"aad5e9363bbff7693e68e1175ce195553ae1351ac0109321932d9ac4c52523a6"} Dec 03 09:57:36 crc kubenswrapper[4576]: I1203 09:57:36.778672 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-8x9ck" podStartSLOduration=3.330088537 podStartE2EDuration="5.778647581s" podCreationTimestamp="2025-12-03 09:57:31 +0000 UTC" firstStartedPulling="2025-12-03 09:57:33.723570809 +0000 UTC m=+4661.109547803" lastFinishedPulling="2025-12-03 09:57:36.172129823 +0000 UTC m=+4663.558106847" observedRunningTime="2025-12-03 09:57:36.778063185 +0000 UTC m=+4664.164040169" watchObservedRunningTime="2025-12-03 09:57:36.778647581 +0000 UTC 
m=+4664.164624575" Dec 03 09:57:42 crc kubenswrapper[4576]: I1203 09:57:42.081161 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-8x9ck" Dec 03 09:57:42 crc kubenswrapper[4576]: I1203 09:57:42.081963 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-8x9ck" Dec 03 09:57:42 crc kubenswrapper[4576]: I1203 09:57:42.153682 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-8x9ck" Dec 03 09:57:42 crc kubenswrapper[4576]: I1203 09:57:42.879276 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-8x9ck" Dec 03 09:57:42 crc kubenswrapper[4576]: I1203 09:57:42.969500 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8x9ck"] Dec 03 09:57:44 crc kubenswrapper[4576]: I1203 09:57:44.677932 4576 scope.go:117] "RemoveContainer" containerID="1cd38f3963db452577ddef8d05fbbfe3ed203e3c62f5c4d5773a282726874a79" Dec 03 09:57:44 crc kubenswrapper[4576]: E1203 09:57:44.678615 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:57:44 crc kubenswrapper[4576]: I1203 09:57:44.839952 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-8x9ck" podUID="e338de09-eabc-45ae-9595-eb154ac4b70e" containerName="registry-server" containerID="cri-o://aad5e9363bbff7693e68e1175ce195553ae1351ac0109321932d9ac4c52523a6" gracePeriod=2 Dec 03 09:57:45 crc kubenswrapper[4576]: I1203 09:57:45.513081 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8x9ck" Dec 03 09:57:45 crc kubenswrapper[4576]: I1203 09:57:45.680536 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e338de09-eabc-45ae-9595-eb154ac4b70e-catalog-content\") pod \"e338de09-eabc-45ae-9595-eb154ac4b70e\" (UID: \"e338de09-eabc-45ae-9595-eb154ac4b70e\") " Dec 03 09:57:45 crc kubenswrapper[4576]: I1203 09:57:45.680860 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p8jr7\" (UniqueName: \"kubernetes.io/projected/e338de09-eabc-45ae-9595-eb154ac4b70e-kube-api-access-p8jr7\") pod \"e338de09-eabc-45ae-9595-eb154ac4b70e\" (UID: \"e338de09-eabc-45ae-9595-eb154ac4b70e\") " Dec 03 09:57:45 crc kubenswrapper[4576]: I1203 09:57:45.680896 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e338de09-eabc-45ae-9595-eb154ac4b70e-utilities\") pod \"e338de09-eabc-45ae-9595-eb154ac4b70e\" (UID: \"e338de09-eabc-45ae-9595-eb154ac4b70e\") " Dec 03 09:57:45 crc kubenswrapper[4576]: I1203 09:57:45.682075 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e338de09-eabc-45ae-9595-eb154ac4b70e-utilities" (OuterVolumeSpecName: "utilities") pod "e338de09-eabc-45ae-9595-eb154ac4b70e" (UID: "e338de09-eabc-45ae-9595-eb154ac4b70e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:57:45 crc kubenswrapper[4576]: I1203 09:57:45.743272 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e338de09-eabc-45ae-9595-eb154ac4b70e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e338de09-eabc-45ae-9595-eb154ac4b70e" (UID: "e338de09-eabc-45ae-9595-eb154ac4b70e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:57:45 crc kubenswrapper[4576]: I1203 09:57:45.783480 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e338de09-eabc-45ae-9595-eb154ac4b70e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 09:57:45 crc kubenswrapper[4576]: I1203 09:57:45.783565 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e338de09-eabc-45ae-9595-eb154ac4b70e-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 09:57:45 crc kubenswrapper[4576]: I1203 09:57:45.856349 4576 generic.go:334] "Generic (PLEG): container finished" podID="e338de09-eabc-45ae-9595-eb154ac4b70e" containerID="aad5e9363bbff7693e68e1175ce195553ae1351ac0109321932d9ac4c52523a6" exitCode=0 Dec 03 09:57:45 crc kubenswrapper[4576]: I1203 09:57:45.856422 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8x9ck" event={"ID":"e338de09-eabc-45ae-9595-eb154ac4b70e","Type":"ContainerDied","Data":"aad5e9363bbff7693e68e1175ce195553ae1351ac0109321932d9ac4c52523a6"} Dec 03 09:57:45 crc kubenswrapper[4576]: I1203 09:57:45.856430 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8x9ck" Dec 03 09:57:45 crc kubenswrapper[4576]: I1203 09:57:45.856481 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8x9ck" event={"ID":"e338de09-eabc-45ae-9595-eb154ac4b70e","Type":"ContainerDied","Data":"b14c75f9fb26a3ec02c5ab986226bcb037813e01fc48b5d41c5474f5a292622e"} Dec 03 09:57:45 crc kubenswrapper[4576]: I1203 09:57:45.856506 4576 scope.go:117] "RemoveContainer" containerID="aad5e9363bbff7693e68e1175ce195553ae1351ac0109321932d9ac4c52523a6" Dec 03 09:57:45 crc kubenswrapper[4576]: I1203 09:57:45.885966 4576 scope.go:117] "RemoveContainer" containerID="55ac919a9be4505a36a94d1a64dffa8e240e3fc331b5ba98e892e07188de7620" Dec 03 09:57:46 crc kubenswrapper[4576]: I1203 09:57:46.117932 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e338de09-eabc-45ae-9595-eb154ac4b70e-kube-api-access-p8jr7" (OuterVolumeSpecName: "kube-api-access-p8jr7") pod "e338de09-eabc-45ae-9595-eb154ac4b70e" (UID: "e338de09-eabc-45ae-9595-eb154ac4b70e"). InnerVolumeSpecName "kube-api-access-p8jr7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:57:46 crc kubenswrapper[4576]: I1203 09:57:46.134918 4576 scope.go:117] "RemoveContainer" containerID="dd8d0f702537ebe084acb47c69da251e87a3ef4776c9090151994479cd1ae79c" Dec 03 09:57:46 crc kubenswrapper[4576]: I1203 09:57:46.191181 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p8jr7\" (UniqueName: \"kubernetes.io/projected/e338de09-eabc-45ae-9595-eb154ac4b70e-kube-api-access-p8jr7\") on node \"crc\" DevicePath \"\"" Dec 03 09:57:46 crc kubenswrapper[4576]: I1203 09:57:46.230793 4576 scope.go:117] "RemoveContainer" containerID="aad5e9363bbff7693e68e1175ce195553ae1351ac0109321932d9ac4c52523a6" Dec 03 09:57:46 crc kubenswrapper[4576]: E1203 09:57:46.231762 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aad5e9363bbff7693e68e1175ce195553ae1351ac0109321932d9ac4c52523a6\": container with ID starting with aad5e9363bbff7693e68e1175ce195553ae1351ac0109321932d9ac4c52523a6 not found: ID does not exist" containerID="aad5e9363bbff7693e68e1175ce195553ae1351ac0109321932d9ac4c52523a6" Dec 03 09:57:46 crc kubenswrapper[4576]: I1203 09:57:46.231795 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aad5e9363bbff7693e68e1175ce195553ae1351ac0109321932d9ac4c52523a6"} err="failed to get container status \"aad5e9363bbff7693e68e1175ce195553ae1351ac0109321932d9ac4c52523a6\": rpc error: code = NotFound desc = could not find container \"aad5e9363bbff7693e68e1175ce195553ae1351ac0109321932d9ac4c52523a6\": container with ID starting with aad5e9363bbff7693e68e1175ce195553ae1351ac0109321932d9ac4c52523a6 not found: ID does not exist" Dec 03 09:57:46 crc kubenswrapper[4576]: I1203 09:57:46.231820 4576 scope.go:117] "RemoveContainer" containerID="55ac919a9be4505a36a94d1a64dffa8e240e3fc331b5ba98e892e07188de7620" Dec 03 09:57:46 crc kubenswrapper[4576]: E1203 09:57:46.232191 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"55ac919a9be4505a36a94d1a64dffa8e240e3fc331b5ba98e892e07188de7620\": container with ID starting with 55ac919a9be4505a36a94d1a64dffa8e240e3fc331b5ba98e892e07188de7620 not found: ID does not exist" 
containerID="55ac919a9be4505a36a94d1a64dffa8e240e3fc331b5ba98e892e07188de7620" Dec 03 09:57:46 crc kubenswrapper[4576]: I1203 09:57:46.232214 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55ac919a9be4505a36a94d1a64dffa8e240e3fc331b5ba98e892e07188de7620"} err="failed to get container status \"55ac919a9be4505a36a94d1a64dffa8e240e3fc331b5ba98e892e07188de7620\": rpc error: code = NotFound desc = could not find container \"55ac919a9be4505a36a94d1a64dffa8e240e3fc331b5ba98e892e07188de7620\": container with ID starting with 55ac919a9be4505a36a94d1a64dffa8e240e3fc331b5ba98e892e07188de7620 not found: ID does not exist" Dec 03 09:57:46 crc kubenswrapper[4576]: I1203 09:57:46.232228 4576 scope.go:117] "RemoveContainer" containerID="dd8d0f702537ebe084acb47c69da251e87a3ef4776c9090151994479cd1ae79c" Dec 03 09:57:46 crc kubenswrapper[4576]: E1203 09:57:46.232450 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd8d0f702537ebe084acb47c69da251e87a3ef4776c9090151994479cd1ae79c\": container with ID starting with dd8d0f702537ebe084acb47c69da251e87a3ef4776c9090151994479cd1ae79c not found: ID does not exist" containerID="dd8d0f702537ebe084acb47c69da251e87a3ef4776c9090151994479cd1ae79c" Dec 03 09:57:46 crc kubenswrapper[4576]: I1203 09:57:46.232482 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd8d0f702537ebe084acb47c69da251e87a3ef4776c9090151994479cd1ae79c"} err="failed to get container status \"dd8d0f702537ebe084acb47c69da251e87a3ef4776c9090151994479cd1ae79c\": rpc error: code = NotFound desc = could not find container \"dd8d0f702537ebe084acb47c69da251e87a3ef4776c9090151994479cd1ae79c\": container with ID starting with dd8d0f702537ebe084acb47c69da251e87a3ef4776c9090151994479cd1ae79c not found: ID does not exist" Dec 03 09:57:46 crc kubenswrapper[4576]: I1203 09:57:46.274850 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8x9ck"] Dec 03 09:57:46 crc kubenswrapper[4576]: I1203 09:57:46.285883 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-8x9ck"] Dec 03 09:57:47 crc kubenswrapper[4576]: I1203 09:57:47.691915 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e338de09-eabc-45ae-9595-eb154ac4b70e" path="/var/lib/kubelet/pods/e338de09-eabc-45ae-9595-eb154ac4b70e/volumes" Dec 03 09:57:58 crc kubenswrapper[4576]: I1203 09:57:58.678152 4576 scope.go:117] "RemoveContainer" containerID="1cd38f3963db452577ddef8d05fbbfe3ed203e3c62f5c4d5773a282726874a79" Dec 03 09:57:58 crc kubenswrapper[4576]: E1203 09:57:58.680990 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:58:10 crc kubenswrapper[4576]: I1203 09:58:10.678303 4576 scope.go:117] "RemoveContainer" containerID="1cd38f3963db452577ddef8d05fbbfe3ed203e3c62f5c4d5773a282726874a79" Dec 03 09:58:10 crc kubenswrapper[4576]: E1203 09:58:10.679097 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:58:23 crc kubenswrapper[4576]: I1203 09:58:23.697230 4576 scope.go:117] "RemoveContainer" containerID="1cd38f3963db452577ddef8d05fbbfe3ed203e3c62f5c4d5773a282726874a79" Dec 03 09:58:23 crc kubenswrapper[4576]: E1203 09:58:23.698245 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:58:35 crc kubenswrapper[4576]: I1203 09:58:35.678495 4576 scope.go:117] "RemoveContainer" containerID="1cd38f3963db452577ddef8d05fbbfe3ed203e3c62f5c4d5773a282726874a79" Dec 03 09:58:35 crc kubenswrapper[4576]: E1203 09:58:35.679289 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:58:48 crc kubenswrapper[4576]: I1203 09:58:48.677835 4576 scope.go:117] "RemoveContainer" containerID="1cd38f3963db452577ddef8d05fbbfe3ed203e3c62f5c4d5773a282726874a79" Dec 03 09:58:48 crc kubenswrapper[4576]: E1203 09:58:48.678606 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:59:02 crc kubenswrapper[4576]: I1203 09:59:02.678163 4576 scope.go:117] "RemoveContainer" containerID="1cd38f3963db452577ddef8d05fbbfe3ed203e3c62f5c4d5773a282726874a79" Dec 03 09:59:02 crc kubenswrapper[4576]: E1203 09:59:02.679067 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 09:59:16 crc kubenswrapper[4576]: I1203 09:59:16.678236 4576 scope.go:117] "RemoveContainer" containerID="1cd38f3963db452577ddef8d05fbbfe3ed203e3c62f5c4d5773a282726874a79" Dec 03 09:59:17 crc kubenswrapper[4576]: I1203 09:59:17.767639 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" event={"ID":"60b1bede-26e9-4b5d-b450-9866da685693","Type":"ContainerStarted","Data":"a6eb737ac8500af571785c87c9921c0edf7cefbe10bc5543eca2bb51c65c9068"} Dec 03 09:59:21 crc kubenswrapper[4576]: 
I1203 09:59:21.057295 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-gtcg9"] Dec 03 09:59:21 crc kubenswrapper[4576]: E1203 09:59:21.058205 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e338de09-eabc-45ae-9595-eb154ac4b70e" containerName="extract-content" Dec 03 09:59:21 crc kubenswrapper[4576]: I1203 09:59:21.058222 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="e338de09-eabc-45ae-9595-eb154ac4b70e" containerName="extract-content" Dec 03 09:59:21 crc kubenswrapper[4576]: E1203 09:59:21.058248 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e338de09-eabc-45ae-9595-eb154ac4b70e" containerName="extract-utilities" Dec 03 09:59:21 crc kubenswrapper[4576]: I1203 09:59:21.058257 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="e338de09-eabc-45ae-9595-eb154ac4b70e" containerName="extract-utilities" Dec 03 09:59:21 crc kubenswrapper[4576]: E1203 09:59:21.058275 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e338de09-eabc-45ae-9595-eb154ac4b70e" containerName="registry-server" Dec 03 09:59:21 crc kubenswrapper[4576]: I1203 09:59:21.058282 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="e338de09-eabc-45ae-9595-eb154ac4b70e" containerName="registry-server" Dec 03 09:59:21 crc kubenswrapper[4576]: I1203 09:59:21.058508 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="e338de09-eabc-45ae-9595-eb154ac4b70e" containerName="registry-server" Dec 03 09:59:21 crc kubenswrapper[4576]: I1203 09:59:21.060143 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gtcg9" Dec 03 09:59:21 crc kubenswrapper[4576]: I1203 09:59:21.131956 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gtcg9"] Dec 03 09:59:21 crc kubenswrapper[4576]: I1203 09:59:21.163456 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8479bbc-ecac-49d1-b114-5fca04a40157-catalog-content\") pod \"certified-operators-gtcg9\" (UID: \"e8479bbc-ecac-49d1-b114-5fca04a40157\") " pod="openshift-marketplace/certified-operators-gtcg9" Dec 03 09:59:21 crc kubenswrapper[4576]: I1203 09:59:21.163569 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2xk5n\" (UniqueName: \"kubernetes.io/projected/e8479bbc-ecac-49d1-b114-5fca04a40157-kube-api-access-2xk5n\") pod \"certified-operators-gtcg9\" (UID: \"e8479bbc-ecac-49d1-b114-5fca04a40157\") " pod="openshift-marketplace/certified-operators-gtcg9" Dec 03 09:59:21 crc kubenswrapper[4576]: I1203 09:59:21.163625 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8479bbc-ecac-49d1-b114-5fca04a40157-utilities\") pod \"certified-operators-gtcg9\" (UID: \"e8479bbc-ecac-49d1-b114-5fca04a40157\") " pod="openshift-marketplace/certified-operators-gtcg9" Dec 03 09:59:21 crc kubenswrapper[4576]: I1203 09:59:21.265779 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8479bbc-ecac-49d1-b114-5fca04a40157-catalog-content\") pod \"certified-operators-gtcg9\" (UID: \"e8479bbc-ecac-49d1-b114-5fca04a40157\") " pod="openshift-marketplace/certified-operators-gtcg9" Dec 03 09:59:21 
crc kubenswrapper[4576]: I1203 09:59:21.265874 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2xk5n\" (UniqueName: \"kubernetes.io/projected/e8479bbc-ecac-49d1-b114-5fca04a40157-kube-api-access-2xk5n\") pod \"certified-operators-gtcg9\" (UID: \"e8479bbc-ecac-49d1-b114-5fca04a40157\") " pod="openshift-marketplace/certified-operators-gtcg9" Dec 03 09:59:21 crc kubenswrapper[4576]: I1203 09:59:21.265938 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8479bbc-ecac-49d1-b114-5fca04a40157-utilities\") pod \"certified-operators-gtcg9\" (UID: \"e8479bbc-ecac-49d1-b114-5fca04a40157\") " pod="openshift-marketplace/certified-operators-gtcg9" Dec 03 09:59:21 crc kubenswrapper[4576]: I1203 09:59:21.266346 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8479bbc-ecac-49d1-b114-5fca04a40157-catalog-content\") pod \"certified-operators-gtcg9\" (UID: \"e8479bbc-ecac-49d1-b114-5fca04a40157\") " pod="openshift-marketplace/certified-operators-gtcg9" Dec 03 09:59:21 crc kubenswrapper[4576]: I1203 09:59:21.266402 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8479bbc-ecac-49d1-b114-5fca04a40157-utilities\") pod \"certified-operators-gtcg9\" (UID: \"e8479bbc-ecac-49d1-b114-5fca04a40157\") " pod="openshift-marketplace/certified-operators-gtcg9" Dec 03 09:59:21 crc kubenswrapper[4576]: I1203 09:59:21.303384 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2xk5n\" (UniqueName: \"kubernetes.io/projected/e8479bbc-ecac-49d1-b114-5fca04a40157-kube-api-access-2xk5n\") pod \"certified-operators-gtcg9\" (UID: \"e8479bbc-ecac-49d1-b114-5fca04a40157\") " pod="openshift-marketplace/certified-operators-gtcg9" Dec 03 09:59:21 crc kubenswrapper[4576]: I1203 09:59:21.380285 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gtcg9" Dec 03 09:59:21 crc kubenswrapper[4576]: I1203 09:59:21.853454 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gtcg9"] Dec 03 09:59:22 crc kubenswrapper[4576]: I1203 09:59:22.814397 4576 generic.go:334] "Generic (PLEG): container finished" podID="e8479bbc-ecac-49d1-b114-5fca04a40157" containerID="921acf4051add79d6ec2662d2d06dbdd8101d5ff6dd176b5e35e24d45fa9c27d" exitCode=0 Dec 03 09:59:22 crc kubenswrapper[4576]: I1203 09:59:22.814582 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gtcg9" event={"ID":"e8479bbc-ecac-49d1-b114-5fca04a40157","Type":"ContainerDied","Data":"921acf4051add79d6ec2662d2d06dbdd8101d5ff6dd176b5e35e24d45fa9c27d"} Dec 03 09:59:22 crc kubenswrapper[4576]: I1203 09:59:22.815368 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gtcg9" event={"ID":"e8479bbc-ecac-49d1-b114-5fca04a40157","Type":"ContainerStarted","Data":"38358779d46911d65a4ac7ac852007860d2907e5252c430ff9ee8739a0478d82"} Dec 03 09:59:23 crc kubenswrapper[4576]: I1203 09:59:23.847900 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gtcg9" event={"ID":"e8479bbc-ecac-49d1-b114-5fca04a40157","Type":"ContainerStarted","Data":"293e06222e6e1a2aafc2fc0502cd63774e776eaa03166886e8396d46795fd703"} Dec 03 09:59:24 crc kubenswrapper[4576]: I1203 09:59:24.858301 4576 generic.go:334] "Generic (PLEG): container finished" podID="e8479bbc-ecac-49d1-b114-5fca04a40157" containerID="293e06222e6e1a2aafc2fc0502cd63774e776eaa03166886e8396d46795fd703" exitCode=0 Dec 03 09:59:24 crc kubenswrapper[4576]: I1203 09:59:24.858390 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gtcg9" event={"ID":"e8479bbc-ecac-49d1-b114-5fca04a40157","Type":"ContainerDied","Data":"293e06222e6e1a2aafc2fc0502cd63774e776eaa03166886e8396d46795fd703"} Dec 03 09:59:26 crc kubenswrapper[4576]: I1203 09:59:26.878581 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gtcg9" event={"ID":"e8479bbc-ecac-49d1-b114-5fca04a40157","Type":"ContainerStarted","Data":"22aacc05f756f6d6e4dad52fb9ba14f25f8b1db004d0feae59a984f02ba1daf3"} Dec 03 09:59:26 crc kubenswrapper[4576]: I1203 09:59:26.918477 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-gtcg9" podStartSLOduration=2.442649003 podStartE2EDuration="5.918451096s" podCreationTimestamp="2025-12-03 09:59:21 +0000 UTC" firstStartedPulling="2025-12-03 09:59:22.816905379 +0000 UTC m=+4770.202882363" lastFinishedPulling="2025-12-03 09:59:26.292707482 +0000 UTC m=+4773.678684456" observedRunningTime="2025-12-03 09:59:26.90293842 +0000 UTC m=+4774.288915424" watchObservedRunningTime="2025-12-03 09:59:26.918451096 +0000 UTC m=+4774.304428080" Dec 03 09:59:30 crc kubenswrapper[4576]: I1203 09:59:30.838791 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-kfrpw"] Dec 03 09:59:30 crc kubenswrapper[4576]: I1203 09:59:30.841747 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-kfrpw" Dec 03 09:59:30 crc kubenswrapper[4576]: I1203 09:59:30.856513 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kfrpw"] Dec 03 09:59:31 crc kubenswrapper[4576]: I1203 09:59:31.022819 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0354af3-e9c4-491f-a3a1-fc169075c341-catalog-content\") pod \"community-operators-kfrpw\" (UID: \"b0354af3-e9c4-491f-a3a1-fc169075c341\") " pod="openshift-marketplace/community-operators-kfrpw" Dec 03 09:59:31 crc kubenswrapper[4576]: I1203 09:59:31.023167 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0354af3-e9c4-491f-a3a1-fc169075c341-utilities\") pod \"community-operators-kfrpw\" (UID: \"b0354af3-e9c4-491f-a3a1-fc169075c341\") " pod="openshift-marketplace/community-operators-kfrpw" Dec 03 09:59:31 crc kubenswrapper[4576]: I1203 09:59:31.023451 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gxfhm\" (UniqueName: \"kubernetes.io/projected/b0354af3-e9c4-491f-a3a1-fc169075c341-kube-api-access-gxfhm\") pod \"community-operators-kfrpw\" (UID: \"b0354af3-e9c4-491f-a3a1-fc169075c341\") " pod="openshift-marketplace/community-operators-kfrpw" Dec 03 09:59:31 crc kubenswrapper[4576]: I1203 09:59:31.126450 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gxfhm\" (UniqueName: \"kubernetes.io/projected/b0354af3-e9c4-491f-a3a1-fc169075c341-kube-api-access-gxfhm\") pod \"community-operators-kfrpw\" (UID: \"b0354af3-e9c4-491f-a3a1-fc169075c341\") " pod="openshift-marketplace/community-operators-kfrpw" Dec 03 09:59:31 crc kubenswrapper[4576]: I1203 09:59:31.126615 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0354af3-e9c4-491f-a3a1-fc169075c341-catalog-content\") pod \"community-operators-kfrpw\" (UID: \"b0354af3-e9c4-491f-a3a1-fc169075c341\") " pod="openshift-marketplace/community-operators-kfrpw" Dec 03 09:59:31 crc kubenswrapper[4576]: I1203 09:59:31.126714 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0354af3-e9c4-491f-a3a1-fc169075c341-utilities\") pod \"community-operators-kfrpw\" (UID: \"b0354af3-e9c4-491f-a3a1-fc169075c341\") " pod="openshift-marketplace/community-operators-kfrpw" Dec 03 09:59:31 crc kubenswrapper[4576]: I1203 09:59:31.127293 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0354af3-e9c4-491f-a3a1-fc169075c341-utilities\") pod \"community-operators-kfrpw\" (UID: \"b0354af3-e9c4-491f-a3a1-fc169075c341\") " pod="openshift-marketplace/community-operators-kfrpw" Dec 03 09:59:31 crc kubenswrapper[4576]: I1203 09:59:31.129017 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0354af3-e9c4-491f-a3a1-fc169075c341-catalog-content\") pod \"community-operators-kfrpw\" (UID: \"b0354af3-e9c4-491f-a3a1-fc169075c341\") " pod="openshift-marketplace/community-operators-kfrpw" Dec 03 09:59:31 crc kubenswrapper[4576]: I1203 09:59:31.155336 4576 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-gxfhm\" (UniqueName: \"kubernetes.io/projected/b0354af3-e9c4-491f-a3a1-fc169075c341-kube-api-access-gxfhm\") pod \"community-operators-kfrpw\" (UID: \"b0354af3-e9c4-491f-a3a1-fc169075c341\") " pod="openshift-marketplace/community-operators-kfrpw" Dec 03 09:59:31 crc kubenswrapper[4576]: I1203 09:59:31.175196 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kfrpw" Dec 03 09:59:31 crc kubenswrapper[4576]: I1203 09:59:31.386326 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-gtcg9" Dec 03 09:59:31 crc kubenswrapper[4576]: I1203 09:59:31.388468 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-gtcg9" Dec 03 09:59:31 crc kubenswrapper[4576]: I1203 09:59:31.515835 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-gtcg9" Dec 03 09:59:31 crc kubenswrapper[4576]: I1203 09:59:31.801820 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kfrpw"] Dec 03 09:59:31 crc kubenswrapper[4576]: I1203 09:59:31.930919 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kfrpw" event={"ID":"b0354af3-e9c4-491f-a3a1-fc169075c341","Type":"ContainerStarted","Data":"67a292a042150e31f6825b41a76f9c1d1daab0b063ba904bd24f4409b0c8341b"} Dec 03 09:59:32 crc kubenswrapper[4576]: I1203 09:59:32.006181 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-gtcg9" Dec 03 09:59:32 crc kubenswrapper[4576]: I1203 09:59:32.938803 4576 generic.go:334] "Generic (PLEG): container finished" podID="b0354af3-e9c4-491f-a3a1-fc169075c341" containerID="4414a6f0e5fda05073258894962bb1c36e6ffccdfad4a9018b17caef894ba930" exitCode=0 Dec 03 09:59:32 crc kubenswrapper[4576]: I1203 09:59:32.938878 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kfrpw" event={"ID":"b0354af3-e9c4-491f-a3a1-fc169075c341","Type":"ContainerDied","Data":"4414a6f0e5fda05073258894962bb1c36e6ffccdfad4a9018b17caef894ba930"} Dec 03 09:59:33 crc kubenswrapper[4576]: I1203 09:59:33.950733 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kfrpw" event={"ID":"b0354af3-e9c4-491f-a3a1-fc169075c341","Type":"ContainerStarted","Data":"a36ccc6ec3e2ddc5e2a4165773e327a0c5405f87f5cf0c77abc557db7b31deda"} Dec 03 09:59:35 crc kubenswrapper[4576]: I1203 09:59:35.971098 4576 generic.go:334] "Generic (PLEG): container finished" podID="b0354af3-e9c4-491f-a3a1-fc169075c341" containerID="a36ccc6ec3e2ddc5e2a4165773e327a0c5405f87f5cf0c77abc557db7b31deda" exitCode=0 Dec 03 09:59:35 crc kubenswrapper[4576]: I1203 09:59:35.971173 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kfrpw" event={"ID":"b0354af3-e9c4-491f-a3a1-fc169075c341","Type":"ContainerDied","Data":"a36ccc6ec3e2ddc5e2a4165773e327a0c5405f87f5cf0c77abc557db7b31deda"} Dec 03 09:59:36 crc kubenswrapper[4576]: I1203 09:59:36.232601 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gtcg9"] Dec 03 09:59:36 crc kubenswrapper[4576]: I1203 09:59:36.232907 4576 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-marketplace/certified-operators-gtcg9" podUID="e8479bbc-ecac-49d1-b114-5fca04a40157" containerName="registry-server" containerID="cri-o://22aacc05f756f6d6e4dad52fb9ba14f25f8b1db004d0feae59a984f02ba1daf3" gracePeriod=2 Dec 03 09:59:36 crc kubenswrapper[4576]: I1203 09:59:36.997864 4576 generic.go:334] "Generic (PLEG): container finished" podID="e8479bbc-ecac-49d1-b114-5fca04a40157" containerID="22aacc05f756f6d6e4dad52fb9ba14f25f8b1db004d0feae59a984f02ba1daf3" exitCode=0 Dec 03 09:59:36 crc kubenswrapper[4576]: I1203 09:59:36.997951 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gtcg9" event={"ID":"e8479bbc-ecac-49d1-b114-5fca04a40157","Type":"ContainerDied","Data":"22aacc05f756f6d6e4dad52fb9ba14f25f8b1db004d0feae59a984f02ba1daf3"} Dec 03 09:59:37 crc kubenswrapper[4576]: I1203 09:59:37.607013 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gtcg9" Dec 03 09:59:37 crc kubenswrapper[4576]: I1203 09:59:37.747041 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2xk5n\" (UniqueName: \"kubernetes.io/projected/e8479bbc-ecac-49d1-b114-5fca04a40157-kube-api-access-2xk5n\") pod \"e8479bbc-ecac-49d1-b114-5fca04a40157\" (UID: \"e8479bbc-ecac-49d1-b114-5fca04a40157\") " Dec 03 09:59:37 crc kubenswrapper[4576]: I1203 09:59:37.747127 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8479bbc-ecac-49d1-b114-5fca04a40157-catalog-content\") pod \"e8479bbc-ecac-49d1-b114-5fca04a40157\" (UID: \"e8479bbc-ecac-49d1-b114-5fca04a40157\") " Dec 03 09:59:37 crc kubenswrapper[4576]: I1203 09:59:37.747167 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8479bbc-ecac-49d1-b114-5fca04a40157-utilities\") pod \"e8479bbc-ecac-49d1-b114-5fca04a40157\" (UID: \"e8479bbc-ecac-49d1-b114-5fca04a40157\") " Dec 03 09:59:37 crc kubenswrapper[4576]: I1203 09:59:37.748042 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e8479bbc-ecac-49d1-b114-5fca04a40157-utilities" (OuterVolumeSpecName: "utilities") pod "e8479bbc-ecac-49d1-b114-5fca04a40157" (UID: "e8479bbc-ecac-49d1-b114-5fca04a40157"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:59:37 crc kubenswrapper[4576]: I1203 09:59:37.753768 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8479bbc-ecac-49d1-b114-5fca04a40157-kube-api-access-2xk5n" (OuterVolumeSpecName: "kube-api-access-2xk5n") pod "e8479bbc-ecac-49d1-b114-5fca04a40157" (UID: "e8479bbc-ecac-49d1-b114-5fca04a40157"). InnerVolumeSpecName "kube-api-access-2xk5n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:59:37 crc kubenswrapper[4576]: I1203 09:59:37.796399 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e8479bbc-ecac-49d1-b114-5fca04a40157-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e8479bbc-ecac-49d1-b114-5fca04a40157" (UID: "e8479bbc-ecac-49d1-b114-5fca04a40157"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:59:37 crc kubenswrapper[4576]: I1203 09:59:37.849884 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2xk5n\" (UniqueName: \"kubernetes.io/projected/e8479bbc-ecac-49d1-b114-5fca04a40157-kube-api-access-2xk5n\") on node \"crc\" DevicePath \"\"" Dec 03 09:59:37 crc kubenswrapper[4576]: I1203 09:59:37.849922 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8479bbc-ecac-49d1-b114-5fca04a40157-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 09:59:37 crc kubenswrapper[4576]: I1203 09:59:37.849936 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8479bbc-ecac-49d1-b114-5fca04a40157-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 09:59:38 crc kubenswrapper[4576]: I1203 09:59:38.008662 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kfrpw" event={"ID":"b0354af3-e9c4-491f-a3a1-fc169075c341","Type":"ContainerStarted","Data":"1db2a9666cce885bfdb6b99f28d5e58a20646e74c6575d599bace0375f4bdb12"} Dec 03 09:59:38 crc kubenswrapper[4576]: I1203 09:59:38.026929 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gtcg9" event={"ID":"e8479bbc-ecac-49d1-b114-5fca04a40157","Type":"ContainerDied","Data":"38358779d46911d65a4ac7ac852007860d2907e5252c430ff9ee8739a0478d82"} Dec 03 09:59:38 crc kubenswrapper[4576]: I1203 09:59:38.026999 4576 scope.go:117] "RemoveContainer" containerID="22aacc05f756f6d6e4dad52fb9ba14f25f8b1db004d0feae59a984f02ba1daf3" Dec 03 09:59:38 crc kubenswrapper[4576]: I1203 09:59:38.027212 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gtcg9" Dec 03 09:59:38 crc kubenswrapper[4576]: I1203 09:59:38.052634 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-kfrpw" podStartSLOduration=4.030672234 podStartE2EDuration="8.052611284s" podCreationTimestamp="2025-12-03 09:59:30 +0000 UTC" firstStartedPulling="2025-12-03 09:59:32.941879822 +0000 UTC m=+4780.327856796" lastFinishedPulling="2025-12-03 09:59:36.963818852 +0000 UTC m=+4784.349795846" observedRunningTime="2025-12-03 09:59:38.042297347 +0000 UTC m=+4785.428274331" watchObservedRunningTime="2025-12-03 09:59:38.052611284 +0000 UTC m=+4785.438588268" Dec 03 09:59:38 crc kubenswrapper[4576]: I1203 09:59:38.077261 4576 scope.go:117] "RemoveContainer" containerID="293e06222e6e1a2aafc2fc0502cd63774e776eaa03166886e8396d46795fd703" Dec 03 09:59:38 crc kubenswrapper[4576]: I1203 09:59:38.079435 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gtcg9"] Dec 03 09:59:38 crc kubenswrapper[4576]: I1203 09:59:38.091442 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-gtcg9"] Dec 03 09:59:38 crc kubenswrapper[4576]: I1203 09:59:38.135963 4576 scope.go:117] "RemoveContainer" containerID="921acf4051add79d6ec2662d2d06dbdd8101d5ff6dd176b5e35e24d45fa9c27d" Dec 03 09:59:39 crc kubenswrapper[4576]: I1203 09:59:39.689061 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e8479bbc-ecac-49d1-b114-5fca04a40157" path="/var/lib/kubelet/pods/e8479bbc-ecac-49d1-b114-5fca04a40157/volumes" Dec 03 09:59:41 crc kubenswrapper[4576]: I1203 09:59:41.176986 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-kfrpw" Dec 03 09:59:41 crc kubenswrapper[4576]: I1203 09:59:41.177404 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-kfrpw" Dec 03 09:59:41 crc kubenswrapper[4576]: I1203 09:59:41.353945 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-kfrpw" Dec 03 09:59:42 crc kubenswrapper[4576]: I1203 09:59:42.115076 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-kfrpw" Dec 03 09:59:44 crc kubenswrapper[4576]: I1203 09:59:44.819151 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kfrpw"] Dec 03 09:59:44 crc kubenswrapper[4576]: I1203 09:59:44.819756 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-kfrpw" podUID="b0354af3-e9c4-491f-a3a1-fc169075c341" containerName="registry-server" containerID="cri-o://1db2a9666cce885bfdb6b99f28d5e58a20646e74c6575d599bace0375f4bdb12" gracePeriod=2 Dec 03 09:59:45 crc kubenswrapper[4576]: I1203 09:59:45.117657 4576 generic.go:334] "Generic (PLEG): container finished" podID="b0354af3-e9c4-491f-a3a1-fc169075c341" containerID="1db2a9666cce885bfdb6b99f28d5e58a20646e74c6575d599bace0375f4bdb12" exitCode=0 Dec 03 09:59:45 crc kubenswrapper[4576]: I1203 09:59:45.118305 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kfrpw" event={"ID":"b0354af3-e9c4-491f-a3a1-fc169075c341","Type":"ContainerDied","Data":"1db2a9666cce885bfdb6b99f28d5e58a20646e74c6575d599bace0375f4bdb12"} Dec 
03 09:59:45 crc kubenswrapper[4576]: I1203 09:59:45.493434 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kfrpw" Dec 03 09:59:45 crc kubenswrapper[4576]: I1203 09:59:45.515951 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0354af3-e9c4-491f-a3a1-fc169075c341-catalog-content\") pod \"b0354af3-e9c4-491f-a3a1-fc169075c341\" (UID: \"b0354af3-e9c4-491f-a3a1-fc169075c341\") " Dec 03 09:59:45 crc kubenswrapper[4576]: I1203 09:59:45.516024 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0354af3-e9c4-491f-a3a1-fc169075c341-utilities\") pod \"b0354af3-e9c4-491f-a3a1-fc169075c341\" (UID: \"b0354af3-e9c4-491f-a3a1-fc169075c341\") " Dec 03 09:59:45 crc kubenswrapper[4576]: I1203 09:59:45.516257 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gxfhm\" (UniqueName: \"kubernetes.io/projected/b0354af3-e9c4-491f-a3a1-fc169075c341-kube-api-access-gxfhm\") pod \"b0354af3-e9c4-491f-a3a1-fc169075c341\" (UID: \"b0354af3-e9c4-491f-a3a1-fc169075c341\") " Dec 03 09:59:45 crc kubenswrapper[4576]: I1203 09:59:45.517402 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b0354af3-e9c4-491f-a3a1-fc169075c341-utilities" (OuterVolumeSpecName: "utilities") pod "b0354af3-e9c4-491f-a3a1-fc169075c341" (UID: "b0354af3-e9c4-491f-a3a1-fc169075c341"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:59:45 crc kubenswrapper[4576]: I1203 09:59:45.529438 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b0354af3-e9c4-491f-a3a1-fc169075c341-kube-api-access-gxfhm" (OuterVolumeSpecName: "kube-api-access-gxfhm") pod "b0354af3-e9c4-491f-a3a1-fc169075c341" (UID: "b0354af3-e9c4-491f-a3a1-fc169075c341"). InnerVolumeSpecName "kube-api-access-gxfhm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 09:59:45 crc kubenswrapper[4576]: I1203 09:59:45.574653 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b0354af3-e9c4-491f-a3a1-fc169075c341-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b0354af3-e9c4-491f-a3a1-fc169075c341" (UID: "b0354af3-e9c4-491f-a3a1-fc169075c341"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 09:59:45 crc kubenswrapper[4576]: I1203 09:59:45.619053 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gxfhm\" (UniqueName: \"kubernetes.io/projected/b0354af3-e9c4-491f-a3a1-fc169075c341-kube-api-access-gxfhm\") on node \"crc\" DevicePath \"\"" Dec 03 09:59:45 crc kubenswrapper[4576]: I1203 09:59:45.619279 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0354af3-e9c4-491f-a3a1-fc169075c341-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 09:59:45 crc kubenswrapper[4576]: I1203 09:59:45.619357 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0354af3-e9c4-491f-a3a1-fc169075c341-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 09:59:46 crc kubenswrapper[4576]: I1203 09:59:46.131600 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kfrpw" event={"ID":"b0354af3-e9c4-491f-a3a1-fc169075c341","Type":"ContainerDied","Data":"67a292a042150e31f6825b41a76f9c1d1daab0b063ba904bd24f4409b0c8341b"} Dec 03 09:59:46 crc kubenswrapper[4576]: I1203 09:59:46.131674 4576 scope.go:117] "RemoveContainer" containerID="1db2a9666cce885bfdb6b99f28d5e58a20646e74c6575d599bace0375f4bdb12" Dec 03 09:59:46 crc kubenswrapper[4576]: I1203 09:59:46.132868 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kfrpw" Dec 03 09:59:46 crc kubenswrapper[4576]: I1203 09:59:46.159134 4576 scope.go:117] "RemoveContainer" containerID="a36ccc6ec3e2ddc5e2a4165773e327a0c5405f87f5cf0c77abc557db7b31deda" Dec 03 09:59:46 crc kubenswrapper[4576]: I1203 09:59:46.165599 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kfrpw"] Dec 03 09:59:46 crc kubenswrapper[4576]: I1203 09:59:46.184041 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-kfrpw"] Dec 03 09:59:46 crc kubenswrapper[4576]: I1203 09:59:46.217724 4576 scope.go:117] "RemoveContainer" containerID="4414a6f0e5fda05073258894962bb1c36e6ffccdfad4a9018b17caef894ba930" Dec 03 09:59:47 crc kubenswrapper[4576]: I1203 09:59:47.690494 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b0354af3-e9c4-491f-a3a1-fc169075c341" path="/var/lib/kubelet/pods/b0354af3-e9c4-491f-a3a1-fc169075c341/volumes" Dec 03 10:00:00 crc kubenswrapper[4576]: I1203 10:00:00.164644 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412600-4h6nz"] Dec 03 10:00:00 crc kubenswrapper[4576]: E1203 10:00:00.165568 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0354af3-e9c4-491f-a3a1-fc169075c341" containerName="registry-server" Dec 03 10:00:00 crc kubenswrapper[4576]: I1203 10:00:00.165583 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0354af3-e9c4-491f-a3a1-fc169075c341" containerName="registry-server" Dec 03 10:00:00 crc kubenswrapper[4576]: E1203 10:00:00.165609 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8479bbc-ecac-49d1-b114-5fca04a40157" containerName="extract-content" Dec 03 10:00:00 crc kubenswrapper[4576]: I1203 10:00:00.165618 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8479bbc-ecac-49d1-b114-5fca04a40157" containerName="extract-content" Dec 03 10:00:00 crc kubenswrapper[4576]: E1203 
10:00:00.165635 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0354af3-e9c4-491f-a3a1-fc169075c341" containerName="extract-utilities" Dec 03 10:00:00 crc kubenswrapper[4576]: I1203 10:00:00.165643 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0354af3-e9c4-491f-a3a1-fc169075c341" containerName="extract-utilities" Dec 03 10:00:00 crc kubenswrapper[4576]: E1203 10:00:00.165659 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8479bbc-ecac-49d1-b114-5fca04a40157" containerName="registry-server" Dec 03 10:00:00 crc kubenswrapper[4576]: I1203 10:00:00.165665 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8479bbc-ecac-49d1-b114-5fca04a40157" containerName="registry-server" Dec 03 10:00:00 crc kubenswrapper[4576]: E1203 10:00:00.165678 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0354af3-e9c4-491f-a3a1-fc169075c341" containerName="extract-content" Dec 03 10:00:00 crc kubenswrapper[4576]: I1203 10:00:00.165684 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0354af3-e9c4-491f-a3a1-fc169075c341" containerName="extract-content" Dec 03 10:00:00 crc kubenswrapper[4576]: E1203 10:00:00.165701 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8479bbc-ecac-49d1-b114-5fca04a40157" containerName="extract-utilities" Dec 03 10:00:00 crc kubenswrapper[4576]: I1203 10:00:00.165708 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8479bbc-ecac-49d1-b114-5fca04a40157" containerName="extract-utilities" Dec 03 10:00:00 crc kubenswrapper[4576]: I1203 10:00:00.165908 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8479bbc-ecac-49d1-b114-5fca04a40157" containerName="registry-server" Dec 03 10:00:00 crc kubenswrapper[4576]: I1203 10:00:00.165936 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0354af3-e9c4-491f-a3a1-fc169075c341" containerName="registry-server" Dec 03 10:00:00 crc kubenswrapper[4576]: I1203 10:00:00.166832 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412600-4h6nz" Dec 03 10:00:00 crc kubenswrapper[4576]: I1203 10:00:00.173749 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 03 10:00:00 crc kubenswrapper[4576]: I1203 10:00:00.173747 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 03 10:00:00 crc kubenswrapper[4576]: I1203 10:00:00.194754 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4957e290-37b6-470f-889e-f3f1a391fe02-secret-volume\") pod \"collect-profiles-29412600-4h6nz\" (UID: \"4957e290-37b6-470f-889e-f3f1a391fe02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412600-4h6nz" Dec 03 10:00:00 crc kubenswrapper[4576]: I1203 10:00:00.194845 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4957e290-37b6-470f-889e-f3f1a391fe02-config-volume\") pod \"collect-profiles-29412600-4h6nz\" (UID: \"4957e290-37b6-470f-889e-f3f1a391fe02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412600-4h6nz" Dec 03 10:00:00 crc kubenswrapper[4576]: I1203 10:00:00.194873 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r6j9p\" (UniqueName: \"kubernetes.io/projected/4957e290-37b6-470f-889e-f3f1a391fe02-kube-api-access-r6j9p\") pod \"collect-profiles-29412600-4h6nz\" (UID: \"4957e290-37b6-470f-889e-f3f1a391fe02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412600-4h6nz" Dec 03 10:00:00 crc kubenswrapper[4576]: I1203 10:00:00.211604 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412600-4h6nz"] Dec 03 10:00:00 crc kubenswrapper[4576]: I1203 10:00:00.296680 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4957e290-37b6-470f-889e-f3f1a391fe02-secret-volume\") pod \"collect-profiles-29412600-4h6nz\" (UID: \"4957e290-37b6-470f-889e-f3f1a391fe02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412600-4h6nz" Dec 03 10:00:00 crc kubenswrapper[4576]: I1203 10:00:00.296948 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4957e290-37b6-470f-889e-f3f1a391fe02-config-volume\") pod \"collect-profiles-29412600-4h6nz\" (UID: \"4957e290-37b6-470f-889e-f3f1a391fe02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412600-4h6nz" Dec 03 10:00:00 crc kubenswrapper[4576]: I1203 10:00:00.297035 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r6j9p\" (UniqueName: \"kubernetes.io/projected/4957e290-37b6-470f-889e-f3f1a391fe02-kube-api-access-r6j9p\") pod \"collect-profiles-29412600-4h6nz\" (UID: \"4957e290-37b6-470f-889e-f3f1a391fe02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412600-4h6nz" Dec 03 10:00:00 crc kubenswrapper[4576]: I1203 10:00:00.297903 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4957e290-37b6-470f-889e-f3f1a391fe02-config-volume\") pod 
\"collect-profiles-29412600-4h6nz\" (UID: \"4957e290-37b6-470f-889e-f3f1a391fe02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412600-4h6nz" Dec 03 10:00:00 crc kubenswrapper[4576]: I1203 10:00:00.313983 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4957e290-37b6-470f-889e-f3f1a391fe02-secret-volume\") pod \"collect-profiles-29412600-4h6nz\" (UID: \"4957e290-37b6-470f-889e-f3f1a391fe02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412600-4h6nz" Dec 03 10:00:00 crc kubenswrapper[4576]: I1203 10:00:00.325497 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r6j9p\" (UniqueName: \"kubernetes.io/projected/4957e290-37b6-470f-889e-f3f1a391fe02-kube-api-access-r6j9p\") pod \"collect-profiles-29412600-4h6nz\" (UID: \"4957e290-37b6-470f-889e-f3f1a391fe02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412600-4h6nz" Dec 03 10:00:00 crc kubenswrapper[4576]: I1203 10:00:00.494167 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412600-4h6nz" Dec 03 10:00:01 crc kubenswrapper[4576]: I1203 10:00:01.115555 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412600-4h6nz"] Dec 03 10:00:01 crc kubenswrapper[4576]: I1203 10:00:01.274426 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412600-4h6nz" event={"ID":"4957e290-37b6-470f-889e-f3f1a391fe02","Type":"ContainerStarted","Data":"5c36b1fcf76eefa92a0e8a47bd2295012727b77d2803ab2ee9130cac8059a95e"} Dec 03 10:00:01 crc kubenswrapper[4576]: I1203 10:00:01.274470 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412600-4h6nz" event={"ID":"4957e290-37b6-470f-889e-f3f1a391fe02","Type":"ContainerStarted","Data":"ef42435391135d3171a0dc8a61164bbe8cf93f81fc0afab52d7fc6b6d1484949"} Dec 03 10:00:01 crc kubenswrapper[4576]: I1203 10:00:01.307233 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29412600-4h6nz" podStartSLOduration=1.307214637 podStartE2EDuration="1.307214637s" podCreationTimestamp="2025-12-03 10:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 10:00:01.293910168 +0000 UTC m=+4808.679887162" watchObservedRunningTime="2025-12-03 10:00:01.307214637 +0000 UTC m=+4808.693191621" Dec 03 10:00:02 crc kubenswrapper[4576]: E1203 10:00:02.077623 4576 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4957e290_37b6_470f_889e_f3f1a391fe02.slice/crio-5c36b1fcf76eefa92a0e8a47bd2295012727b77d2803ab2ee9130cac8059a95e.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4957e290_37b6_470f_889e_f3f1a391fe02.slice/crio-conmon-5c36b1fcf76eefa92a0e8a47bd2295012727b77d2803ab2ee9130cac8059a95e.scope\": RecentStats: unable to find data in memory cache]" Dec 03 10:00:02 crc kubenswrapper[4576]: I1203 10:00:02.286169 4576 generic.go:334] "Generic (PLEG): container finished" podID="4957e290-37b6-470f-889e-f3f1a391fe02" 
containerID="5c36b1fcf76eefa92a0e8a47bd2295012727b77d2803ab2ee9130cac8059a95e" exitCode=0 Dec 03 10:00:02 crc kubenswrapper[4576]: I1203 10:00:02.286219 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412600-4h6nz" event={"ID":"4957e290-37b6-470f-889e-f3f1a391fe02","Type":"ContainerDied","Data":"5c36b1fcf76eefa92a0e8a47bd2295012727b77d2803ab2ee9130cac8059a95e"} Dec 03 10:00:03 crc kubenswrapper[4576]: I1203 10:00:03.940771 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412600-4h6nz" Dec 03 10:00:04 crc kubenswrapper[4576]: I1203 10:00:04.015582 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4957e290-37b6-470f-889e-f3f1a391fe02-config-volume\") pod \"4957e290-37b6-470f-889e-f3f1a391fe02\" (UID: \"4957e290-37b6-470f-889e-f3f1a391fe02\") " Dec 03 10:00:04 crc kubenswrapper[4576]: I1203 10:00:04.015701 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r6j9p\" (UniqueName: \"kubernetes.io/projected/4957e290-37b6-470f-889e-f3f1a391fe02-kube-api-access-r6j9p\") pod \"4957e290-37b6-470f-889e-f3f1a391fe02\" (UID: \"4957e290-37b6-470f-889e-f3f1a391fe02\") " Dec 03 10:00:04 crc kubenswrapper[4576]: I1203 10:00:04.015741 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4957e290-37b6-470f-889e-f3f1a391fe02-secret-volume\") pod \"4957e290-37b6-470f-889e-f3f1a391fe02\" (UID: \"4957e290-37b6-470f-889e-f3f1a391fe02\") " Dec 03 10:00:04 crc kubenswrapper[4576]: I1203 10:00:04.016394 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4957e290-37b6-470f-889e-f3f1a391fe02-config-volume" (OuterVolumeSpecName: "config-volume") pod "4957e290-37b6-470f-889e-f3f1a391fe02" (UID: "4957e290-37b6-470f-889e-f3f1a391fe02"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 10:00:04 crc kubenswrapper[4576]: I1203 10:00:04.022262 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4957e290-37b6-470f-889e-f3f1a391fe02-kube-api-access-r6j9p" (OuterVolumeSpecName: "kube-api-access-r6j9p") pod "4957e290-37b6-470f-889e-f3f1a391fe02" (UID: "4957e290-37b6-470f-889e-f3f1a391fe02"). InnerVolumeSpecName "kube-api-access-r6j9p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 10:00:04 crc kubenswrapper[4576]: I1203 10:00:04.024748 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4957e290-37b6-470f-889e-f3f1a391fe02-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "4957e290-37b6-470f-889e-f3f1a391fe02" (UID: "4957e290-37b6-470f-889e-f3f1a391fe02"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 10:00:04 crc kubenswrapper[4576]: I1203 10:00:04.118137 4576 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4957e290-37b6-470f-889e-f3f1a391fe02-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 10:00:04 crc kubenswrapper[4576]: I1203 10:00:04.118182 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r6j9p\" (UniqueName: \"kubernetes.io/projected/4957e290-37b6-470f-889e-f3f1a391fe02-kube-api-access-r6j9p\") on node \"crc\" DevicePath \"\"" Dec 03 10:00:04 crc kubenswrapper[4576]: I1203 10:00:04.118198 4576 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4957e290-37b6-470f-889e-f3f1a391fe02-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 10:00:04 crc kubenswrapper[4576]: I1203 10:00:04.334608 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412600-4h6nz" event={"ID":"4957e290-37b6-470f-889e-f3f1a391fe02","Type":"ContainerDied","Data":"ef42435391135d3171a0dc8a61164bbe8cf93f81fc0afab52d7fc6b6d1484949"} Dec 03 10:00:04 crc kubenswrapper[4576]: I1203 10:00:04.334645 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ef42435391135d3171a0dc8a61164bbe8cf93f81fc0afab52d7fc6b6d1484949" Dec 03 10:00:04 crc kubenswrapper[4576]: I1203 10:00:04.334707 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412600-4h6nz" Dec 03 10:00:04 crc kubenswrapper[4576]: I1203 10:00:04.390584 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412555-7sjxl"] Dec 03 10:00:04 crc kubenswrapper[4576]: I1203 10:00:04.401387 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412555-7sjxl"] Dec 03 10:00:05 crc kubenswrapper[4576]: I1203 10:00:05.692261 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2cda19d3-1f9d-4c20-9a3f-eb3e19542a35" path="/var/lib/kubelet/pods/2cda19d3-1f9d-4c20-9a3f-eb3e19542a35/volumes" Dec 03 10:00:57 crc kubenswrapper[4576]: I1203 10:00:57.937432 4576 scope.go:117] "RemoveContainer" containerID="9000014e5e90d665690f3ef7d8feeeaa29e07496ada51baf487e901867c6f830" Dec 03 10:01:00 crc kubenswrapper[4576]: I1203 10:01:00.148291 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29412601-4r4n5"] Dec 03 10:01:00 crc kubenswrapper[4576]: E1203 10:01:00.149255 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4957e290-37b6-470f-889e-f3f1a391fe02" containerName="collect-profiles" Dec 03 10:01:00 crc kubenswrapper[4576]: I1203 10:01:00.149269 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="4957e290-37b6-470f-889e-f3f1a391fe02" containerName="collect-profiles" Dec 03 10:01:00 crc kubenswrapper[4576]: I1203 10:01:00.149482 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="4957e290-37b6-470f-889e-f3f1a391fe02" containerName="collect-profiles" Dec 03 10:01:00 crc kubenswrapper[4576]: I1203 10:01:00.150157 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29412601-4r4n5" Dec 03 10:01:00 crc kubenswrapper[4576]: I1203 10:01:00.170293 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29412601-4r4n5"] Dec 03 10:01:00 crc kubenswrapper[4576]: I1203 10:01:00.330938 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccd3075b-2364-4539-b6c1-046722becd51-config-data\") pod \"keystone-cron-29412601-4r4n5\" (UID: \"ccd3075b-2364-4539-b6c1-046722becd51\") " pod="openstack/keystone-cron-29412601-4r4n5" Dec 03 10:01:00 crc kubenswrapper[4576]: I1203 10:01:00.331029 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pc45h\" (UniqueName: \"kubernetes.io/projected/ccd3075b-2364-4539-b6c1-046722becd51-kube-api-access-pc45h\") pod \"keystone-cron-29412601-4r4n5\" (UID: \"ccd3075b-2364-4539-b6c1-046722becd51\") " pod="openstack/keystone-cron-29412601-4r4n5" Dec 03 10:01:00 crc kubenswrapper[4576]: I1203 10:01:00.331281 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ccd3075b-2364-4539-b6c1-046722becd51-fernet-keys\") pod \"keystone-cron-29412601-4r4n5\" (UID: \"ccd3075b-2364-4539-b6c1-046722becd51\") " pod="openstack/keystone-cron-29412601-4r4n5" Dec 03 10:01:00 crc kubenswrapper[4576]: I1203 10:01:00.331621 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccd3075b-2364-4539-b6c1-046722becd51-combined-ca-bundle\") pod \"keystone-cron-29412601-4r4n5\" (UID: \"ccd3075b-2364-4539-b6c1-046722becd51\") " pod="openstack/keystone-cron-29412601-4r4n5" Dec 03 10:01:00 crc kubenswrapper[4576]: I1203 10:01:00.433018 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccd3075b-2364-4539-b6c1-046722becd51-combined-ca-bundle\") pod \"keystone-cron-29412601-4r4n5\" (UID: \"ccd3075b-2364-4539-b6c1-046722becd51\") " pod="openstack/keystone-cron-29412601-4r4n5" Dec 03 10:01:00 crc kubenswrapper[4576]: I1203 10:01:00.433140 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccd3075b-2364-4539-b6c1-046722becd51-config-data\") pod \"keystone-cron-29412601-4r4n5\" (UID: \"ccd3075b-2364-4539-b6c1-046722becd51\") " pod="openstack/keystone-cron-29412601-4r4n5" Dec 03 10:01:00 crc kubenswrapper[4576]: I1203 10:01:00.433183 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pc45h\" (UniqueName: \"kubernetes.io/projected/ccd3075b-2364-4539-b6c1-046722becd51-kube-api-access-pc45h\") pod \"keystone-cron-29412601-4r4n5\" (UID: \"ccd3075b-2364-4539-b6c1-046722becd51\") " pod="openstack/keystone-cron-29412601-4r4n5" Dec 03 10:01:00 crc kubenswrapper[4576]: I1203 10:01:00.433245 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ccd3075b-2364-4539-b6c1-046722becd51-fernet-keys\") pod \"keystone-cron-29412601-4r4n5\" (UID: \"ccd3075b-2364-4539-b6c1-046722becd51\") " pod="openstack/keystone-cron-29412601-4r4n5" Dec 03 10:01:00 crc kubenswrapper[4576]: I1203 10:01:00.439256 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ccd3075b-2364-4539-b6c1-046722becd51-fernet-keys\") pod \"keystone-cron-29412601-4r4n5\" (UID: \"ccd3075b-2364-4539-b6c1-046722becd51\") " pod="openstack/keystone-cron-29412601-4r4n5" Dec 03 10:01:00 crc kubenswrapper[4576]: I1203 10:01:00.439462 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccd3075b-2364-4539-b6c1-046722becd51-combined-ca-bundle\") pod \"keystone-cron-29412601-4r4n5\" (UID: \"ccd3075b-2364-4539-b6c1-046722becd51\") " pod="openstack/keystone-cron-29412601-4r4n5" Dec 03 10:01:00 crc kubenswrapper[4576]: I1203 10:01:00.442154 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccd3075b-2364-4539-b6c1-046722becd51-config-data\") pod \"keystone-cron-29412601-4r4n5\" (UID: \"ccd3075b-2364-4539-b6c1-046722becd51\") " pod="openstack/keystone-cron-29412601-4r4n5" Dec 03 10:01:00 crc kubenswrapper[4576]: I1203 10:01:00.457695 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pc45h\" (UniqueName: \"kubernetes.io/projected/ccd3075b-2364-4539-b6c1-046722becd51-kube-api-access-pc45h\") pod \"keystone-cron-29412601-4r4n5\" (UID: \"ccd3075b-2364-4539-b6c1-046722becd51\") " pod="openstack/keystone-cron-29412601-4r4n5" Dec 03 10:01:00 crc kubenswrapper[4576]: I1203 10:01:00.471027 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29412601-4r4n5" Dec 03 10:01:00 crc kubenswrapper[4576]: I1203 10:01:00.985868 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29412601-4r4n5"] Dec 03 10:01:01 crc kubenswrapper[4576]: I1203 10:01:01.013186 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29412601-4r4n5" event={"ID":"ccd3075b-2364-4539-b6c1-046722becd51","Type":"ContainerStarted","Data":"4c8f616e1eead9f66f304fc7efd930df7808dd514b5b40697c38efcf2111f709"} Dec 03 10:01:02 crc kubenswrapper[4576]: I1203 10:01:02.031417 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29412601-4r4n5" event={"ID":"ccd3075b-2364-4539-b6c1-046722becd51","Type":"ContainerStarted","Data":"4d606209195816b95917e5184db612c26a7437106e85f321b5ba016b61b84e22"} Dec 03 10:01:02 crc kubenswrapper[4576]: I1203 10:01:02.056338 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29412601-4r4n5" podStartSLOduration=2.056319564 podStartE2EDuration="2.056319564s" podCreationTimestamp="2025-12-03 10:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 10:01:02.050212608 +0000 UTC m=+4869.436189622" watchObservedRunningTime="2025-12-03 10:01:02.056319564 +0000 UTC m=+4869.442296548" Dec 03 10:01:05 crc kubenswrapper[4576]: I1203 10:01:05.073410 4576 generic.go:334] "Generic (PLEG): container finished" podID="ccd3075b-2364-4539-b6c1-046722becd51" containerID="4d606209195816b95917e5184db612c26a7437106e85f321b5ba016b61b84e22" exitCode=0 Dec 03 10:01:05 crc kubenswrapper[4576]: I1203 10:01:05.073475 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29412601-4r4n5" event={"ID":"ccd3075b-2364-4539-b6c1-046722becd51","Type":"ContainerDied","Data":"4d606209195816b95917e5184db612c26a7437106e85f321b5ba016b61b84e22"} Dec 03 10:01:06 crc kubenswrapper[4576]: 
I1203 10:01:06.525462 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29412601-4r4n5" Dec 03 10:01:06 crc kubenswrapper[4576]: I1203 10:01:06.656315 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pc45h\" (UniqueName: \"kubernetes.io/projected/ccd3075b-2364-4539-b6c1-046722becd51-kube-api-access-pc45h\") pod \"ccd3075b-2364-4539-b6c1-046722becd51\" (UID: \"ccd3075b-2364-4539-b6c1-046722becd51\") " Dec 03 10:01:06 crc kubenswrapper[4576]: I1203 10:01:06.656362 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccd3075b-2364-4539-b6c1-046722becd51-config-data\") pod \"ccd3075b-2364-4539-b6c1-046722becd51\" (UID: \"ccd3075b-2364-4539-b6c1-046722becd51\") " Dec 03 10:01:06 crc kubenswrapper[4576]: I1203 10:01:06.656593 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccd3075b-2364-4539-b6c1-046722becd51-combined-ca-bundle\") pod \"ccd3075b-2364-4539-b6c1-046722becd51\" (UID: \"ccd3075b-2364-4539-b6c1-046722becd51\") " Dec 03 10:01:06 crc kubenswrapper[4576]: I1203 10:01:06.656658 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ccd3075b-2364-4539-b6c1-046722becd51-fernet-keys\") pod \"ccd3075b-2364-4539-b6c1-046722becd51\" (UID: \"ccd3075b-2364-4539-b6c1-046722becd51\") " Dec 03 10:01:06 crc kubenswrapper[4576]: I1203 10:01:06.673431 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ccd3075b-2364-4539-b6c1-046722becd51-kube-api-access-pc45h" (OuterVolumeSpecName: "kube-api-access-pc45h") pod "ccd3075b-2364-4539-b6c1-046722becd51" (UID: "ccd3075b-2364-4539-b6c1-046722becd51"). InnerVolumeSpecName "kube-api-access-pc45h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 10:01:06 crc kubenswrapper[4576]: I1203 10:01:06.673573 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ccd3075b-2364-4539-b6c1-046722becd51-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "ccd3075b-2364-4539-b6c1-046722becd51" (UID: "ccd3075b-2364-4539-b6c1-046722becd51"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 10:01:06 crc kubenswrapper[4576]: I1203 10:01:06.698158 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ccd3075b-2364-4539-b6c1-046722becd51-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ccd3075b-2364-4539-b6c1-046722becd51" (UID: "ccd3075b-2364-4539-b6c1-046722becd51"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 10:01:06 crc kubenswrapper[4576]: I1203 10:01:06.709646 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ccd3075b-2364-4539-b6c1-046722becd51-config-data" (OuterVolumeSpecName: "config-data") pod "ccd3075b-2364-4539-b6c1-046722becd51" (UID: "ccd3075b-2364-4539-b6c1-046722becd51"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 10:01:06 crc kubenswrapper[4576]: I1203 10:01:06.759263 4576 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccd3075b-2364-4539-b6c1-046722becd51-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 10:01:06 crc kubenswrapper[4576]: I1203 10:01:06.759296 4576 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ccd3075b-2364-4539-b6c1-046722becd51-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 03 10:01:06 crc kubenswrapper[4576]: I1203 10:01:06.759305 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pc45h\" (UniqueName: \"kubernetes.io/projected/ccd3075b-2364-4539-b6c1-046722becd51-kube-api-access-pc45h\") on node \"crc\" DevicePath \"\"" Dec 03 10:01:06 crc kubenswrapper[4576]: I1203 10:01:06.759314 4576 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccd3075b-2364-4539-b6c1-046722becd51-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 10:01:07 crc kubenswrapper[4576]: I1203 10:01:07.097233 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29412601-4r4n5" event={"ID":"ccd3075b-2364-4539-b6c1-046722becd51","Type":"ContainerDied","Data":"4c8f616e1eead9f66f304fc7efd930df7808dd514b5b40697c38efcf2111f709"} Dec 03 10:01:07 crc kubenswrapper[4576]: I1203 10:01:07.097282 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4c8f616e1eead9f66f304fc7efd930df7808dd514b5b40697c38efcf2111f709" Dec 03 10:01:07 crc kubenswrapper[4576]: I1203 10:01:07.097336 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29412601-4r4n5" Dec 03 10:01:39 crc kubenswrapper[4576]: I1203 10:01:39.680727 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 10:01:39 crc kubenswrapper[4576]: I1203 10:01:39.681264 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 10:02:09 crc kubenswrapper[4576]: I1203 10:02:09.681091 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 10:02:09 crc kubenswrapper[4576]: I1203 10:02:09.681784 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 10:02:39 crc kubenswrapper[4576]: I1203 10:02:39.682187 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: 
Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 10:02:39 crc kubenswrapper[4576]: I1203 10:02:39.683114 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 10:02:39 crc kubenswrapper[4576]: I1203 10:02:39.728244 4576 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" Dec 03 10:02:39 crc kubenswrapper[4576]: I1203 10:02:39.728772 4576 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a6eb737ac8500af571785c87c9921c0edf7cefbe10bc5543eca2bb51c65c9068"} pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 10:02:39 crc kubenswrapper[4576]: I1203 10:02:39.728827 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" containerID="cri-o://a6eb737ac8500af571785c87c9921c0edf7cefbe10bc5543eca2bb51c65c9068" gracePeriod=600 Dec 03 10:02:40 crc kubenswrapper[4576]: I1203 10:02:40.644207 4576 generic.go:334] "Generic (PLEG): container finished" podID="60b1bede-26e9-4b5d-b450-9866da685693" containerID="a6eb737ac8500af571785c87c9921c0edf7cefbe10bc5543eca2bb51c65c9068" exitCode=0 Dec 03 10:02:40 crc kubenswrapper[4576]: I1203 10:02:40.644261 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" event={"ID":"60b1bede-26e9-4b5d-b450-9866da685693","Type":"ContainerDied","Data":"a6eb737ac8500af571785c87c9921c0edf7cefbe10bc5543eca2bb51c65c9068"} Dec 03 10:02:40 crc kubenswrapper[4576]: I1203 10:02:40.644880 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" event={"ID":"60b1bede-26e9-4b5d-b450-9866da685693","Type":"ContainerStarted","Data":"1eb3d8c30b6286eadddb1522aa392505713e173cbe42e5fd9209c65960d0c2af"} Dec 03 10:02:40 crc kubenswrapper[4576]: I1203 10:02:40.644913 4576 scope.go:117] "RemoveContainer" containerID="1cd38f3963db452577ddef8d05fbbfe3ed203e3c62f5c4d5773a282726874a79" Dec 03 10:03:24 crc kubenswrapper[4576]: I1203 10:03:24.550850 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-khjhd"] Dec 03 10:03:24 crc kubenswrapper[4576]: E1203 10:03:24.551678 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccd3075b-2364-4539-b6c1-046722becd51" containerName="keystone-cron" Dec 03 10:03:24 crc kubenswrapper[4576]: I1203 10:03:24.551690 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccd3075b-2364-4539-b6c1-046722becd51" containerName="keystone-cron" Dec 03 10:03:24 crc kubenswrapper[4576]: I1203 10:03:24.551911 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="ccd3075b-2364-4539-b6c1-046722becd51" containerName="keystone-cron" Dec 03 10:03:24 crc kubenswrapper[4576]: I1203 10:03:24.553264 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-khjhd" Dec 03 10:03:24 crc kubenswrapper[4576]: I1203 10:03:24.570138 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-khjhd"] Dec 03 10:03:24 crc kubenswrapper[4576]: I1203 10:03:24.675156 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-skgj2\" (UniqueName: \"kubernetes.io/projected/d82b2188-a2f8-4373-b1d7-ecb2670e5718-kube-api-access-skgj2\") pod \"redhat-operators-khjhd\" (UID: \"d82b2188-a2f8-4373-b1d7-ecb2670e5718\") " pod="openshift-marketplace/redhat-operators-khjhd" Dec 03 10:03:24 crc kubenswrapper[4576]: I1203 10:03:24.675477 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d82b2188-a2f8-4373-b1d7-ecb2670e5718-catalog-content\") pod \"redhat-operators-khjhd\" (UID: \"d82b2188-a2f8-4373-b1d7-ecb2670e5718\") " pod="openshift-marketplace/redhat-operators-khjhd" Dec 03 10:03:24 crc kubenswrapper[4576]: I1203 10:03:24.675724 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d82b2188-a2f8-4373-b1d7-ecb2670e5718-utilities\") pod \"redhat-operators-khjhd\" (UID: \"d82b2188-a2f8-4373-b1d7-ecb2670e5718\") " pod="openshift-marketplace/redhat-operators-khjhd" Dec 03 10:03:24 crc kubenswrapper[4576]: I1203 10:03:24.777178 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-skgj2\" (UniqueName: \"kubernetes.io/projected/d82b2188-a2f8-4373-b1d7-ecb2670e5718-kube-api-access-skgj2\") pod \"redhat-operators-khjhd\" (UID: \"d82b2188-a2f8-4373-b1d7-ecb2670e5718\") " pod="openshift-marketplace/redhat-operators-khjhd" Dec 03 10:03:24 crc kubenswrapper[4576]: I1203 10:03:24.777302 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d82b2188-a2f8-4373-b1d7-ecb2670e5718-catalog-content\") pod \"redhat-operators-khjhd\" (UID: \"d82b2188-a2f8-4373-b1d7-ecb2670e5718\") " pod="openshift-marketplace/redhat-operators-khjhd" Dec 03 10:03:24 crc kubenswrapper[4576]: I1203 10:03:24.777349 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d82b2188-a2f8-4373-b1d7-ecb2670e5718-utilities\") pod \"redhat-operators-khjhd\" (UID: \"d82b2188-a2f8-4373-b1d7-ecb2670e5718\") " pod="openshift-marketplace/redhat-operators-khjhd" Dec 03 10:03:24 crc kubenswrapper[4576]: I1203 10:03:24.777842 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d82b2188-a2f8-4373-b1d7-ecb2670e5718-utilities\") pod \"redhat-operators-khjhd\" (UID: \"d82b2188-a2f8-4373-b1d7-ecb2670e5718\") " pod="openshift-marketplace/redhat-operators-khjhd" Dec 03 10:03:24 crc kubenswrapper[4576]: I1203 10:03:24.778335 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d82b2188-a2f8-4373-b1d7-ecb2670e5718-catalog-content\") pod \"redhat-operators-khjhd\" (UID: \"d82b2188-a2f8-4373-b1d7-ecb2670e5718\") " pod="openshift-marketplace/redhat-operators-khjhd" Dec 03 10:03:24 crc kubenswrapper[4576]: I1203 10:03:24.811085 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-skgj2\" (UniqueName: \"kubernetes.io/projected/d82b2188-a2f8-4373-b1d7-ecb2670e5718-kube-api-access-skgj2\") pod \"redhat-operators-khjhd\" (UID: \"d82b2188-a2f8-4373-b1d7-ecb2670e5718\") " pod="openshift-marketplace/redhat-operators-khjhd" Dec 03 10:03:24 crc kubenswrapper[4576]: I1203 10:03:24.889267 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-khjhd" Dec 03 10:03:25 crc kubenswrapper[4576]: I1203 10:03:25.382252 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-khjhd"] Dec 03 10:03:26 crc kubenswrapper[4576]: I1203 10:03:26.137750 4576 generic.go:334] "Generic (PLEG): container finished" podID="d82b2188-a2f8-4373-b1d7-ecb2670e5718" containerID="0628d3e9669a976beea88ff05f9dba051d74de369877c535949858b01a9069ba" exitCode=0 Dec 03 10:03:26 crc kubenswrapper[4576]: I1203 10:03:26.137924 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-khjhd" event={"ID":"d82b2188-a2f8-4373-b1d7-ecb2670e5718","Type":"ContainerDied","Data":"0628d3e9669a976beea88ff05f9dba051d74de369877c535949858b01a9069ba"} Dec 03 10:03:26 crc kubenswrapper[4576]: I1203 10:03:26.138270 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-khjhd" event={"ID":"d82b2188-a2f8-4373-b1d7-ecb2670e5718","Type":"ContainerStarted","Data":"d17287c90a7546be8685ef378cfaa48eb69f2bd04a0aeff7b576f9b391f9871e"} Dec 03 10:03:26 crc kubenswrapper[4576]: I1203 10:03:26.140580 4576 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 10:03:27 crc kubenswrapper[4576]: I1203 10:03:27.150458 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-khjhd" event={"ID":"d82b2188-a2f8-4373-b1d7-ecb2670e5718","Type":"ContainerStarted","Data":"267650588befbf6df0c7e2b5ecababcfb2ed9dbb37a60f31671eb61aef326de4"} Dec 03 10:03:31 crc kubenswrapper[4576]: I1203 10:03:31.216515 4576 generic.go:334] "Generic (PLEG): container finished" podID="d82b2188-a2f8-4373-b1d7-ecb2670e5718" containerID="267650588befbf6df0c7e2b5ecababcfb2ed9dbb37a60f31671eb61aef326de4" exitCode=0 Dec 03 10:03:31 crc kubenswrapper[4576]: I1203 10:03:31.216595 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-khjhd" event={"ID":"d82b2188-a2f8-4373-b1d7-ecb2670e5718","Type":"ContainerDied","Data":"267650588befbf6df0c7e2b5ecababcfb2ed9dbb37a60f31671eb61aef326de4"} Dec 03 10:03:32 crc kubenswrapper[4576]: I1203 10:03:32.227264 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-khjhd" event={"ID":"d82b2188-a2f8-4373-b1d7-ecb2670e5718","Type":"ContainerStarted","Data":"8a27ddc2f7931c727e88dd032a50ab99f8cbb4078f282d5a87e859c5fb88f67f"} Dec 03 10:03:32 crc kubenswrapper[4576]: I1203 10:03:32.247733 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-khjhd" podStartSLOduration=2.634664494 podStartE2EDuration="8.247710176s" podCreationTimestamp="2025-12-03 10:03:24 +0000 UTC" firstStartedPulling="2025-12-03 10:03:26.140304412 +0000 UTC m=+5013.526281396" lastFinishedPulling="2025-12-03 10:03:31.753350083 +0000 UTC m=+5019.139327078" observedRunningTime="2025-12-03 10:03:32.244647234 +0000 UTC m=+5019.630624228" watchObservedRunningTime="2025-12-03 10:03:32.247710176 +0000 UTC m=+5019.633687160" Dec 03 10:03:34 crc 
kubenswrapper[4576]: I1203 10:03:34.889890 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-khjhd" Dec 03 10:03:34 crc kubenswrapper[4576]: I1203 10:03:34.891676 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-khjhd" Dec 03 10:03:35 crc kubenswrapper[4576]: I1203 10:03:35.938991 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-khjhd" podUID="d82b2188-a2f8-4373-b1d7-ecb2670e5718" containerName="registry-server" probeResult="failure" output=< Dec 03 10:03:35 crc kubenswrapper[4576]: timeout: failed to connect service ":50051" within 1s Dec 03 10:03:35 crc kubenswrapper[4576]: > Dec 03 10:03:45 crc kubenswrapper[4576]: I1203 10:03:45.479633 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-khjhd" Dec 03 10:03:45 crc kubenswrapper[4576]: I1203 10:03:45.528886 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-khjhd" Dec 03 10:03:45 crc kubenswrapper[4576]: I1203 10:03:45.721172 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-khjhd"] Dec 03 10:03:47 crc kubenswrapper[4576]: I1203 10:03:47.399874 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-khjhd" podUID="d82b2188-a2f8-4373-b1d7-ecb2670e5718" containerName="registry-server" containerID="cri-o://8a27ddc2f7931c727e88dd032a50ab99f8cbb4078f282d5a87e859c5fb88f67f" gracePeriod=2 Dec 03 10:03:47 crc kubenswrapper[4576]: I1203 10:03:47.922144 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-khjhd" Dec 03 10:03:47 crc kubenswrapper[4576]: I1203 10:03:47.996090 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d82b2188-a2f8-4373-b1d7-ecb2670e5718-catalog-content\") pod \"d82b2188-a2f8-4373-b1d7-ecb2670e5718\" (UID: \"d82b2188-a2f8-4373-b1d7-ecb2670e5718\") " Dec 03 10:03:47 crc kubenswrapper[4576]: I1203 10:03:47.996128 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d82b2188-a2f8-4373-b1d7-ecb2670e5718-utilities\") pod \"d82b2188-a2f8-4373-b1d7-ecb2670e5718\" (UID: \"d82b2188-a2f8-4373-b1d7-ecb2670e5718\") " Dec 03 10:03:47 crc kubenswrapper[4576]: I1203 10:03:47.996187 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-skgj2\" (UniqueName: \"kubernetes.io/projected/d82b2188-a2f8-4373-b1d7-ecb2670e5718-kube-api-access-skgj2\") pod \"d82b2188-a2f8-4373-b1d7-ecb2670e5718\" (UID: \"d82b2188-a2f8-4373-b1d7-ecb2670e5718\") " Dec 03 10:03:47 crc kubenswrapper[4576]: I1203 10:03:47.997491 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d82b2188-a2f8-4373-b1d7-ecb2670e5718-utilities" (OuterVolumeSpecName: "utilities") pod "d82b2188-a2f8-4373-b1d7-ecb2670e5718" (UID: "d82b2188-a2f8-4373-b1d7-ecb2670e5718"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 10:03:48 crc kubenswrapper[4576]: I1203 10:03:48.027616 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d82b2188-a2f8-4373-b1d7-ecb2670e5718-kube-api-access-skgj2" (OuterVolumeSpecName: "kube-api-access-skgj2") pod "d82b2188-a2f8-4373-b1d7-ecb2670e5718" (UID: "d82b2188-a2f8-4373-b1d7-ecb2670e5718"). InnerVolumeSpecName "kube-api-access-skgj2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 10:03:48 crc kubenswrapper[4576]: I1203 10:03:48.098341 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d82b2188-a2f8-4373-b1d7-ecb2670e5718-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 10:03:48 crc kubenswrapper[4576]: I1203 10:03:48.098372 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-skgj2\" (UniqueName: \"kubernetes.io/projected/d82b2188-a2f8-4373-b1d7-ecb2670e5718-kube-api-access-skgj2\") on node \"crc\" DevicePath \"\"" Dec 03 10:03:48 crc kubenswrapper[4576]: I1203 10:03:48.123221 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d82b2188-a2f8-4373-b1d7-ecb2670e5718-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d82b2188-a2f8-4373-b1d7-ecb2670e5718" (UID: "d82b2188-a2f8-4373-b1d7-ecb2670e5718"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 10:03:48 crc kubenswrapper[4576]: I1203 10:03:48.200763 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d82b2188-a2f8-4373-b1d7-ecb2670e5718-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 10:03:48 crc kubenswrapper[4576]: I1203 10:03:48.410145 4576 generic.go:334] "Generic (PLEG): container finished" podID="d82b2188-a2f8-4373-b1d7-ecb2670e5718" containerID="8a27ddc2f7931c727e88dd032a50ab99f8cbb4078f282d5a87e859c5fb88f67f" exitCode=0 Dec 03 10:03:48 crc kubenswrapper[4576]: I1203 10:03:48.410227 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-khjhd" event={"ID":"d82b2188-a2f8-4373-b1d7-ecb2670e5718","Type":"ContainerDied","Data":"8a27ddc2f7931c727e88dd032a50ab99f8cbb4078f282d5a87e859c5fb88f67f"} Dec 03 10:03:48 crc kubenswrapper[4576]: I1203 10:03:48.410509 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-khjhd" event={"ID":"d82b2188-a2f8-4373-b1d7-ecb2670e5718","Type":"ContainerDied","Data":"d17287c90a7546be8685ef378cfaa48eb69f2bd04a0aeff7b576f9b391f9871e"} Dec 03 10:03:48 crc kubenswrapper[4576]: I1203 10:03:48.410549 4576 scope.go:117] "RemoveContainer" containerID="8a27ddc2f7931c727e88dd032a50ab99f8cbb4078f282d5a87e859c5fb88f67f" Dec 03 10:03:48 crc kubenswrapper[4576]: I1203 10:03:48.410255 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-khjhd" Dec 03 10:03:48 crc kubenswrapper[4576]: I1203 10:03:48.414393 4576 generic.go:334] "Generic (PLEG): container finished" podID="34096dc4-8175-4637-916a-9e52376b8c08" containerID="95fba86fc10497a6aceba808d3334f2aa2ce11feb0407bd279ef2e7374fc3c49" exitCode=0 Dec 03 10:03:48 crc kubenswrapper[4576]: I1203 10:03:48.414584 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"34096dc4-8175-4637-916a-9e52376b8c08","Type":"ContainerDied","Data":"95fba86fc10497a6aceba808d3334f2aa2ce11feb0407bd279ef2e7374fc3c49"} Dec 03 10:03:48 crc kubenswrapper[4576]: I1203 10:03:48.435761 4576 scope.go:117] "RemoveContainer" containerID="267650588befbf6df0c7e2b5ecababcfb2ed9dbb37a60f31671eb61aef326de4" Dec 03 10:03:48 crc kubenswrapper[4576]: I1203 10:03:48.462995 4576 scope.go:117] "RemoveContainer" containerID="0628d3e9669a976beea88ff05f9dba051d74de369877c535949858b01a9069ba" Dec 03 10:03:48 crc kubenswrapper[4576]: I1203 10:03:48.487211 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-khjhd"] Dec 03 10:03:48 crc kubenswrapper[4576]: I1203 10:03:48.518854 4576 scope.go:117] "RemoveContainer" containerID="8a27ddc2f7931c727e88dd032a50ab99f8cbb4078f282d5a87e859c5fb88f67f" Dec 03 10:03:48 crc kubenswrapper[4576]: E1203 10:03:48.520045 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a27ddc2f7931c727e88dd032a50ab99f8cbb4078f282d5a87e859c5fb88f67f\": container with ID starting with 8a27ddc2f7931c727e88dd032a50ab99f8cbb4078f282d5a87e859c5fb88f67f not found: ID does not exist" containerID="8a27ddc2f7931c727e88dd032a50ab99f8cbb4078f282d5a87e859c5fb88f67f" Dec 03 10:03:48 crc kubenswrapper[4576]: I1203 10:03:48.520097 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a27ddc2f7931c727e88dd032a50ab99f8cbb4078f282d5a87e859c5fb88f67f"} err="failed to get container status \"8a27ddc2f7931c727e88dd032a50ab99f8cbb4078f282d5a87e859c5fb88f67f\": rpc error: code = NotFound desc = could not find container \"8a27ddc2f7931c727e88dd032a50ab99f8cbb4078f282d5a87e859c5fb88f67f\": container with ID starting with 8a27ddc2f7931c727e88dd032a50ab99f8cbb4078f282d5a87e859c5fb88f67f not found: ID does not exist" Dec 03 10:03:48 crc kubenswrapper[4576]: I1203 10:03:48.520174 4576 scope.go:117] "RemoveContainer" containerID="267650588befbf6df0c7e2b5ecababcfb2ed9dbb37a60f31671eb61aef326de4" Dec 03 10:03:48 crc kubenswrapper[4576]: E1203 10:03:48.524191 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"267650588befbf6df0c7e2b5ecababcfb2ed9dbb37a60f31671eb61aef326de4\": container with ID starting with 267650588befbf6df0c7e2b5ecababcfb2ed9dbb37a60f31671eb61aef326de4 not found: ID does not exist" containerID="267650588befbf6df0c7e2b5ecababcfb2ed9dbb37a60f31671eb61aef326de4" Dec 03 10:03:48 crc kubenswrapper[4576]: I1203 10:03:48.524457 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"267650588befbf6df0c7e2b5ecababcfb2ed9dbb37a60f31671eb61aef326de4"} err="failed to get container status \"267650588befbf6df0c7e2b5ecababcfb2ed9dbb37a60f31671eb61aef326de4\": rpc error: code = NotFound desc = could not find container \"267650588befbf6df0c7e2b5ecababcfb2ed9dbb37a60f31671eb61aef326de4\": container with ID starting with 
267650588befbf6df0c7e2b5ecababcfb2ed9dbb37a60f31671eb61aef326de4 not found: ID does not exist" Dec 03 10:03:48 crc kubenswrapper[4576]: I1203 10:03:48.524572 4576 scope.go:117] "RemoveContainer" containerID="0628d3e9669a976beea88ff05f9dba051d74de369877c535949858b01a9069ba" Dec 03 10:03:48 crc kubenswrapper[4576]: E1203 10:03:48.525099 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0628d3e9669a976beea88ff05f9dba051d74de369877c535949858b01a9069ba\": container with ID starting with 0628d3e9669a976beea88ff05f9dba051d74de369877c535949858b01a9069ba not found: ID does not exist" containerID="0628d3e9669a976beea88ff05f9dba051d74de369877c535949858b01a9069ba" Dec 03 10:03:48 crc kubenswrapper[4576]: I1203 10:03:48.525192 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0628d3e9669a976beea88ff05f9dba051d74de369877c535949858b01a9069ba"} err="failed to get container status \"0628d3e9669a976beea88ff05f9dba051d74de369877c535949858b01a9069ba\": rpc error: code = NotFound desc = could not find container \"0628d3e9669a976beea88ff05f9dba051d74de369877c535949858b01a9069ba\": container with ID starting with 0628d3e9669a976beea88ff05f9dba051d74de369877c535949858b01a9069ba not found: ID does not exist" Dec 03 10:03:48 crc kubenswrapper[4576]: I1203 10:03:48.526160 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-khjhd"] Dec 03 10:03:49 crc kubenswrapper[4576]: I1203 10:03:49.691939 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d82b2188-a2f8-4373-b1d7-ecb2670e5718" path="/var/lib/kubelet/pods/d82b2188-a2f8-4373-b1d7-ecb2670e5718/volumes" Dec 03 10:03:49 crc kubenswrapper[4576]: I1203 10:03:49.823059 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 03 10:03:49 crc kubenswrapper[4576]: I1203 10:03:49.940412 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/34096dc4-8175-4637-916a-9e52376b8c08-test-operator-ephemeral-workdir\") pod \"34096dc4-8175-4637-916a-9e52376b8c08\" (UID: \"34096dc4-8175-4637-916a-9e52376b8c08\") " Dec 03 10:03:49 crc kubenswrapper[4576]: I1203 10:03:49.940577 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/34096dc4-8175-4637-916a-9e52376b8c08-ssh-key\") pod \"34096dc4-8175-4637-916a-9e52376b8c08\" (UID: \"34096dc4-8175-4637-916a-9e52376b8c08\") " Dec 03 10:03:49 crc kubenswrapper[4576]: I1203 10:03:49.940602 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/34096dc4-8175-4637-916a-9e52376b8c08-openstack-config-secret\") pod \"34096dc4-8175-4637-916a-9e52376b8c08\" (UID: \"34096dc4-8175-4637-916a-9e52376b8c08\") " Dec 03 10:03:49 crc kubenswrapper[4576]: I1203 10:03:49.940870 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/34096dc4-8175-4637-916a-9e52376b8c08-test-operator-ephemeral-temporary\") pod \"34096dc4-8175-4637-916a-9e52376b8c08\" (UID: \"34096dc4-8175-4637-916a-9e52376b8c08\") " Dec 03 10:03:49 crc kubenswrapper[4576]: I1203 10:03:49.940893 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/34096dc4-8175-4637-916a-9e52376b8c08-config-data\") pod \"34096dc4-8175-4637-916a-9e52376b8c08\" (UID: \"34096dc4-8175-4637-916a-9e52376b8c08\") " Dec 03 10:03:49 crc kubenswrapper[4576]: I1203 10:03:49.940941 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/34096dc4-8175-4637-916a-9e52376b8c08-ca-certs\") pod \"34096dc4-8175-4637-916a-9e52376b8c08\" (UID: \"34096dc4-8175-4637-916a-9e52376b8c08\") " Dec 03 10:03:49 crc kubenswrapper[4576]: I1203 10:03:49.940986 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/34096dc4-8175-4637-916a-9e52376b8c08-openstack-config\") pod \"34096dc4-8175-4637-916a-9e52376b8c08\" (UID: \"34096dc4-8175-4637-916a-9e52376b8c08\") " Dec 03 10:03:49 crc kubenswrapper[4576]: I1203 10:03:49.941034 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"34096dc4-8175-4637-916a-9e52376b8c08\" (UID: \"34096dc4-8175-4637-916a-9e52376b8c08\") " Dec 03 10:03:49 crc kubenswrapper[4576]: I1203 10:03:49.941140 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t9p29\" (UniqueName: \"kubernetes.io/projected/34096dc4-8175-4637-916a-9e52376b8c08-kube-api-access-t9p29\") pod \"34096dc4-8175-4637-916a-9e52376b8c08\" (UID: \"34096dc4-8175-4637-916a-9e52376b8c08\") " Dec 03 10:03:49 crc kubenswrapper[4576]: I1203 10:03:49.942446 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/34096dc4-8175-4637-916a-9e52376b8c08-test-operator-ephemeral-temporary" (OuterVolumeSpecName: 
"test-operator-ephemeral-temporary") pod "34096dc4-8175-4637-916a-9e52376b8c08" (UID: "34096dc4-8175-4637-916a-9e52376b8c08"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 10:03:49 crc kubenswrapper[4576]: I1203 10:03:49.943839 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34096dc4-8175-4637-916a-9e52376b8c08-config-data" (OuterVolumeSpecName: "config-data") pod "34096dc4-8175-4637-916a-9e52376b8c08" (UID: "34096dc4-8175-4637-916a-9e52376b8c08"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 10:03:49 crc kubenswrapper[4576]: I1203 10:03:49.946863 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "test-operator-logs") pod "34096dc4-8175-4637-916a-9e52376b8c08" (UID: "34096dc4-8175-4637-916a-9e52376b8c08"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 03 10:03:49 crc kubenswrapper[4576]: I1203 10:03:49.946966 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/34096dc4-8175-4637-916a-9e52376b8c08-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "34096dc4-8175-4637-916a-9e52376b8c08" (UID: "34096dc4-8175-4637-916a-9e52376b8c08"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 10:03:49 crc kubenswrapper[4576]: I1203 10:03:49.947555 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34096dc4-8175-4637-916a-9e52376b8c08-kube-api-access-t9p29" (OuterVolumeSpecName: "kube-api-access-t9p29") pod "34096dc4-8175-4637-916a-9e52376b8c08" (UID: "34096dc4-8175-4637-916a-9e52376b8c08"). InnerVolumeSpecName "kube-api-access-t9p29". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 10:03:49 crc kubenswrapper[4576]: I1203 10:03:49.968354 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34096dc4-8175-4637-916a-9e52376b8c08-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "34096dc4-8175-4637-916a-9e52376b8c08" (UID: "34096dc4-8175-4637-916a-9e52376b8c08"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 10:03:49 crc kubenswrapper[4576]: I1203 10:03:49.978155 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34096dc4-8175-4637-916a-9e52376b8c08-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "34096dc4-8175-4637-916a-9e52376b8c08" (UID: "34096dc4-8175-4637-916a-9e52376b8c08"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 10:03:49 crc kubenswrapper[4576]: I1203 10:03:49.980669 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34096dc4-8175-4637-916a-9e52376b8c08-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "34096dc4-8175-4637-916a-9e52376b8c08" (UID: "34096dc4-8175-4637-916a-9e52376b8c08"). InnerVolumeSpecName "openstack-config-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 10:03:50 crc kubenswrapper[4576]: I1203 10:03:50.003338 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34096dc4-8175-4637-916a-9e52376b8c08-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "34096dc4-8175-4637-916a-9e52376b8c08" (UID: "34096dc4-8175-4637-916a-9e52376b8c08"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 10:03:50 crc kubenswrapper[4576]: I1203 10:03:50.042671 4576 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/34096dc4-8175-4637-916a-9e52376b8c08-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 10:03:50 crc kubenswrapper[4576]: I1203 10:03:50.042698 4576 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/34096dc4-8175-4637-916a-9e52376b8c08-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 03 10:03:50 crc kubenswrapper[4576]: I1203 10:03:50.042711 4576 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/34096dc4-8175-4637-916a-9e52376b8c08-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Dec 03 10:03:50 crc kubenswrapper[4576]: I1203 10:03:50.042721 4576 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/34096dc4-8175-4637-916a-9e52376b8c08-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 10:03:50 crc kubenswrapper[4576]: I1203 10:03:50.042730 4576 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/34096dc4-8175-4637-916a-9e52376b8c08-ca-certs\") on node \"crc\" DevicePath \"\"" Dec 03 10:03:50 crc kubenswrapper[4576]: I1203 10:03:50.042737 4576 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/34096dc4-8175-4637-916a-9e52376b8c08-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 03 10:03:50 crc kubenswrapper[4576]: I1203 10:03:50.043363 4576 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Dec 03 10:03:50 crc kubenswrapper[4576]: I1203 10:03:50.043375 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t9p29\" (UniqueName: \"kubernetes.io/projected/34096dc4-8175-4637-916a-9e52376b8c08-kube-api-access-t9p29\") on node \"crc\" DevicePath \"\"" Dec 03 10:03:50 crc kubenswrapper[4576]: I1203 10:03:50.043384 4576 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/34096dc4-8175-4637-916a-9e52376b8c08-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Dec 03 10:03:50 crc kubenswrapper[4576]: I1203 10:03:50.068223 4576 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Dec 03 10:03:50 crc kubenswrapper[4576]: I1203 10:03:50.144461 4576 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Dec 03 10:03:50 crc kubenswrapper[4576]: I1203 10:03:50.452178 4576 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/tempest-tests-tempest" event={"ID":"34096dc4-8175-4637-916a-9e52376b8c08","Type":"ContainerDied","Data":"83666ab84866734d20baeeae375208e9f32d0649ec6a70a8d9d5112abb514c58"} Dec 03 10:03:50 crc kubenswrapper[4576]: I1203 10:03:50.452221 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="83666ab84866734d20baeeae375208e9f32d0649ec6a70a8d9d5112abb514c58" Dec 03 10:03:50 crc kubenswrapper[4576]: I1203 10:03:50.452280 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 03 10:03:57 crc kubenswrapper[4576]: I1203 10:03:57.092458 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 03 10:03:57 crc kubenswrapper[4576]: E1203 10:03:57.093292 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d82b2188-a2f8-4373-b1d7-ecb2670e5718" containerName="extract-utilities" Dec 03 10:03:57 crc kubenswrapper[4576]: I1203 10:03:57.093305 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="d82b2188-a2f8-4373-b1d7-ecb2670e5718" containerName="extract-utilities" Dec 03 10:03:57 crc kubenswrapper[4576]: E1203 10:03:57.093316 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d82b2188-a2f8-4373-b1d7-ecb2670e5718" containerName="extract-content" Dec 03 10:03:57 crc kubenswrapper[4576]: I1203 10:03:57.093322 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="d82b2188-a2f8-4373-b1d7-ecb2670e5718" containerName="extract-content" Dec 03 10:03:57 crc kubenswrapper[4576]: E1203 10:03:57.093347 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d82b2188-a2f8-4373-b1d7-ecb2670e5718" containerName="registry-server" Dec 03 10:03:57 crc kubenswrapper[4576]: I1203 10:03:57.093353 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="d82b2188-a2f8-4373-b1d7-ecb2670e5718" containerName="registry-server" Dec 03 10:03:57 crc kubenswrapper[4576]: E1203 10:03:57.093368 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34096dc4-8175-4637-916a-9e52376b8c08" containerName="tempest-tests-tempest-tests-runner" Dec 03 10:03:57 crc kubenswrapper[4576]: I1203 10:03:57.093374 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="34096dc4-8175-4637-916a-9e52376b8c08" containerName="tempest-tests-tempest-tests-runner" Dec 03 10:03:57 crc kubenswrapper[4576]: I1203 10:03:57.093567 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="34096dc4-8175-4637-916a-9e52376b8c08" containerName="tempest-tests-tempest-tests-runner" Dec 03 10:03:57 crc kubenswrapper[4576]: I1203 10:03:57.093585 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="d82b2188-a2f8-4373-b1d7-ecb2670e5718" containerName="registry-server" Dec 03 10:03:57 crc kubenswrapper[4576]: I1203 10:03:57.094165 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 03 10:03:57 crc kubenswrapper[4576]: I1203 10:03:57.098589 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-2cmsr" Dec 03 10:03:57 crc kubenswrapper[4576]: I1203 10:03:57.105348 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 03 10:03:57 crc kubenswrapper[4576]: I1203 10:03:57.277250 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dsnm\" (UniqueName: \"kubernetes.io/projected/d9e81811-e29e-4a52-a3d6-3ed997b86415-kube-api-access-6dsnm\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"d9e81811-e29e-4a52-a3d6-3ed997b86415\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 03 10:03:57 crc kubenswrapper[4576]: I1203 10:03:57.277345 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"d9e81811-e29e-4a52-a3d6-3ed997b86415\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 03 10:03:57 crc kubenswrapper[4576]: I1203 10:03:57.379683 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6dsnm\" (UniqueName: \"kubernetes.io/projected/d9e81811-e29e-4a52-a3d6-3ed997b86415-kube-api-access-6dsnm\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"d9e81811-e29e-4a52-a3d6-3ed997b86415\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 03 10:03:57 crc kubenswrapper[4576]: I1203 10:03:57.379812 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"d9e81811-e29e-4a52-a3d6-3ed997b86415\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 03 10:03:57 crc kubenswrapper[4576]: I1203 10:03:57.381370 4576 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"d9e81811-e29e-4a52-a3d6-3ed997b86415\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 03 10:03:57 crc kubenswrapper[4576]: I1203 10:03:57.624281 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6dsnm\" (UniqueName: \"kubernetes.io/projected/d9e81811-e29e-4a52-a3d6-3ed997b86415-kube-api-access-6dsnm\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"d9e81811-e29e-4a52-a3d6-3ed997b86415\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 03 10:03:57 crc kubenswrapper[4576]: I1203 10:03:57.649864 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"d9e81811-e29e-4a52-a3d6-3ed997b86415\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 03 10:03:57 crc 
kubenswrapper[4576]: I1203 10:03:57.733292 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 03 10:03:58 crc kubenswrapper[4576]: I1203 10:03:58.228032 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 03 10:03:58 crc kubenswrapper[4576]: I1203 10:03:58.522056 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"d9e81811-e29e-4a52-a3d6-3ed997b86415","Type":"ContainerStarted","Data":"f4d5835976dfcf1f3d5688ba1300f5f13114e97918034f7800b12050bd1293a2"} Dec 03 10:03:59 crc kubenswrapper[4576]: I1203 10:03:59.534189 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"d9e81811-e29e-4a52-a3d6-3ed997b86415","Type":"ContainerStarted","Data":"55899353b0187b8eb5afebf2bd3b4fb7a72443d492a29074cb2f51201183cf18"} Dec 03 10:03:59 crc kubenswrapper[4576]: I1203 10:03:59.557696 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=1.680246632 podStartE2EDuration="2.557646465s" podCreationTimestamp="2025-12-03 10:03:57 +0000 UTC" firstStartedPulling="2025-12-03 10:03:58.237946963 +0000 UTC m=+5045.623923947" lastFinishedPulling="2025-12-03 10:03:59.115346786 +0000 UTC m=+5046.501323780" observedRunningTime="2025-12-03 10:03:59.548792616 +0000 UTC m=+5046.934769600" watchObservedRunningTime="2025-12-03 10:03:59.557646465 +0000 UTC m=+5046.943623489" Dec 03 10:04:22 crc kubenswrapper[4576]: I1203 10:04:22.683846 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-58lk4/must-gather-bgxvl"] Dec 03 10:04:22 crc kubenswrapper[4576]: I1203 10:04:22.687063 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-58lk4/must-gather-bgxvl" Dec 03 10:04:22 crc kubenswrapper[4576]: I1203 10:04:22.690243 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-58lk4"/"openshift-service-ca.crt" Dec 03 10:04:22 crc kubenswrapper[4576]: I1203 10:04:22.690538 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-58lk4"/"kube-root-ca.crt" Dec 03 10:04:22 crc kubenswrapper[4576]: I1203 10:04:22.731704 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dk7tr\" (UniqueName: \"kubernetes.io/projected/dcea8e10-9631-40f3-8579-359218432ec7-kube-api-access-dk7tr\") pod \"must-gather-bgxvl\" (UID: \"dcea8e10-9631-40f3-8579-359218432ec7\") " pod="openshift-must-gather-58lk4/must-gather-bgxvl" Dec 03 10:04:22 crc kubenswrapper[4576]: I1203 10:04:22.732009 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/dcea8e10-9631-40f3-8579-359218432ec7-must-gather-output\") pod \"must-gather-bgxvl\" (UID: \"dcea8e10-9631-40f3-8579-359218432ec7\") " pod="openshift-must-gather-58lk4/must-gather-bgxvl" Dec 03 10:04:22 crc kubenswrapper[4576]: I1203 10:04:22.734799 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-58lk4/must-gather-bgxvl"] Dec 03 10:04:22 crc kubenswrapper[4576]: I1203 10:04:22.833339 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dk7tr\" (UniqueName: \"kubernetes.io/projected/dcea8e10-9631-40f3-8579-359218432ec7-kube-api-access-dk7tr\") pod \"must-gather-bgxvl\" (UID: \"dcea8e10-9631-40f3-8579-359218432ec7\") " pod="openshift-must-gather-58lk4/must-gather-bgxvl" Dec 03 10:04:22 crc kubenswrapper[4576]: I1203 10:04:22.833780 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/dcea8e10-9631-40f3-8579-359218432ec7-must-gather-output\") pod \"must-gather-bgxvl\" (UID: \"dcea8e10-9631-40f3-8579-359218432ec7\") " pod="openshift-must-gather-58lk4/must-gather-bgxvl" Dec 03 10:04:22 crc kubenswrapper[4576]: I1203 10:04:22.834217 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/dcea8e10-9631-40f3-8579-359218432ec7-must-gather-output\") pod \"must-gather-bgxvl\" (UID: \"dcea8e10-9631-40f3-8579-359218432ec7\") " pod="openshift-must-gather-58lk4/must-gather-bgxvl" Dec 03 10:04:22 crc kubenswrapper[4576]: I1203 10:04:22.884683 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dk7tr\" (UniqueName: \"kubernetes.io/projected/dcea8e10-9631-40f3-8579-359218432ec7-kube-api-access-dk7tr\") pod \"must-gather-bgxvl\" (UID: \"dcea8e10-9631-40f3-8579-359218432ec7\") " pod="openshift-must-gather-58lk4/must-gather-bgxvl" Dec 03 10:04:23 crc kubenswrapper[4576]: I1203 10:04:23.012766 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-58lk4/must-gather-bgxvl" Dec 03 10:04:23 crc kubenswrapper[4576]: I1203 10:04:23.529392 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-58lk4/must-gather-bgxvl"] Dec 03 10:04:23 crc kubenswrapper[4576]: I1203 10:04:23.844341 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-58lk4/must-gather-bgxvl" event={"ID":"dcea8e10-9631-40f3-8579-359218432ec7","Type":"ContainerStarted","Data":"ecbfcdd9fcc3fd1946ca35ca30ad7072e6e72f0ff32240818bc07544393a23ac"} Dec 03 10:04:28 crc kubenswrapper[4576]: I1203 10:04:28.893711 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-58lk4/must-gather-bgxvl" event={"ID":"dcea8e10-9631-40f3-8579-359218432ec7","Type":"ContainerStarted","Data":"aea0cb972ed9f6eb9adac9597b4103a3fe9771ed0039dacf4fcad9e2529406ba"} Dec 03 10:04:28 crc kubenswrapper[4576]: I1203 10:04:28.894230 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-58lk4/must-gather-bgxvl" event={"ID":"dcea8e10-9631-40f3-8579-359218432ec7","Type":"ContainerStarted","Data":"4f10b1db2d7d0850484f2e7bb07067bac4363055e76967d19f96f529af0c1edf"} Dec 03 10:04:28 crc kubenswrapper[4576]: I1203 10:04:28.918382 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-58lk4/must-gather-bgxvl" podStartSLOduration=2.455313506 podStartE2EDuration="6.918360343s" podCreationTimestamp="2025-12-03 10:04:22 +0000 UTC" firstStartedPulling="2025-12-03 10:04:23.553279318 +0000 UTC m=+5070.939256302" lastFinishedPulling="2025-12-03 10:04:28.016326145 +0000 UTC m=+5075.402303139" observedRunningTime="2025-12-03 10:04:28.908892597 +0000 UTC m=+5076.294869581" watchObservedRunningTime="2025-12-03 10:04:28.918360343 +0000 UTC m=+5076.304337327" Dec 03 10:04:33 crc kubenswrapper[4576]: I1203 10:04:33.771138 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-58lk4/crc-debug-22gdb"] Dec 03 10:04:33 crc kubenswrapper[4576]: I1203 10:04:33.774938 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-58lk4/crc-debug-22gdb" Dec 03 10:04:33 crc kubenswrapper[4576]: I1203 10:04:33.778111 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-58lk4"/"default-dockercfg-52nm9" Dec 03 10:04:33 crc kubenswrapper[4576]: I1203 10:04:33.969075 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6cb11ff9-f058-4d0c-a500-de8b366776ca-host\") pod \"crc-debug-22gdb\" (UID: \"6cb11ff9-f058-4d0c-a500-de8b366776ca\") " pod="openshift-must-gather-58lk4/crc-debug-22gdb" Dec 03 10:04:33 crc kubenswrapper[4576]: I1203 10:04:33.969785 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zv9m2\" (UniqueName: \"kubernetes.io/projected/6cb11ff9-f058-4d0c-a500-de8b366776ca-kube-api-access-zv9m2\") pod \"crc-debug-22gdb\" (UID: \"6cb11ff9-f058-4d0c-a500-de8b366776ca\") " pod="openshift-must-gather-58lk4/crc-debug-22gdb" Dec 03 10:04:34 crc kubenswrapper[4576]: I1203 10:04:34.072638 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zv9m2\" (UniqueName: \"kubernetes.io/projected/6cb11ff9-f058-4d0c-a500-de8b366776ca-kube-api-access-zv9m2\") pod \"crc-debug-22gdb\" (UID: \"6cb11ff9-f058-4d0c-a500-de8b366776ca\") " pod="openshift-must-gather-58lk4/crc-debug-22gdb" Dec 03 10:04:34 crc kubenswrapper[4576]: I1203 10:04:34.072788 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6cb11ff9-f058-4d0c-a500-de8b366776ca-host\") pod \"crc-debug-22gdb\" (UID: \"6cb11ff9-f058-4d0c-a500-de8b366776ca\") " pod="openshift-must-gather-58lk4/crc-debug-22gdb" Dec 03 10:04:34 crc kubenswrapper[4576]: I1203 10:04:34.072881 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6cb11ff9-f058-4d0c-a500-de8b366776ca-host\") pod \"crc-debug-22gdb\" (UID: \"6cb11ff9-f058-4d0c-a500-de8b366776ca\") " pod="openshift-must-gather-58lk4/crc-debug-22gdb" Dec 03 10:04:34 crc kubenswrapper[4576]: I1203 10:04:34.102385 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zv9m2\" (UniqueName: \"kubernetes.io/projected/6cb11ff9-f058-4d0c-a500-de8b366776ca-kube-api-access-zv9m2\") pod \"crc-debug-22gdb\" (UID: \"6cb11ff9-f058-4d0c-a500-de8b366776ca\") " pod="openshift-must-gather-58lk4/crc-debug-22gdb" Dec 03 10:04:34 crc kubenswrapper[4576]: I1203 10:04:34.401942 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-58lk4/crc-debug-22gdb" Dec 03 10:04:34 crc kubenswrapper[4576]: I1203 10:04:34.945154 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-58lk4/crc-debug-22gdb" event={"ID":"6cb11ff9-f058-4d0c-a500-de8b366776ca","Type":"ContainerStarted","Data":"ecf4a9db4a2ed3b2cd0b44e0d61fcf14c2cb10d1b1c8a5142f81ff803c7ee906"} Dec 03 10:04:48 crc kubenswrapper[4576]: I1203 10:04:48.071128 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-58lk4/crc-debug-22gdb" event={"ID":"6cb11ff9-f058-4d0c-a500-de8b366776ca","Type":"ContainerStarted","Data":"b14041852be20b0870c486e8e5feb59527cb39fdf4cc2a30b42b4576e5f40b08"} Dec 03 10:04:48 crc kubenswrapper[4576]: I1203 10:04:48.088791 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-58lk4/crc-debug-22gdb" podStartSLOduration=2.155716053 podStartE2EDuration="15.088769762s" podCreationTimestamp="2025-12-03 10:04:33 +0000 UTC" firstStartedPulling="2025-12-03 10:04:34.429921068 +0000 UTC m=+5081.815898052" lastFinishedPulling="2025-12-03 10:04:47.362974777 +0000 UTC m=+5094.748951761" observedRunningTime="2025-12-03 10:04:48.08609066 +0000 UTC m=+5095.472067654" watchObservedRunningTime="2025-12-03 10:04:48.088769762 +0000 UTC m=+5095.474746746" Dec 03 10:05:09 crc kubenswrapper[4576]: I1203 10:05:09.680742 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 10:05:09 crc kubenswrapper[4576]: I1203 10:05:09.682619 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 10:05:38 crc kubenswrapper[4576]: I1203 10:05:38.614126 4576 generic.go:334] "Generic (PLEG): container finished" podID="6cb11ff9-f058-4d0c-a500-de8b366776ca" containerID="b14041852be20b0870c486e8e5feb59527cb39fdf4cc2a30b42b4576e5f40b08" exitCode=0 Dec 03 10:05:38 crc kubenswrapper[4576]: I1203 10:05:38.614250 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-58lk4/crc-debug-22gdb" event={"ID":"6cb11ff9-f058-4d0c-a500-de8b366776ca","Type":"ContainerDied","Data":"b14041852be20b0870c486e8e5feb59527cb39fdf4cc2a30b42b4576e5f40b08"} Dec 03 10:05:39 crc kubenswrapper[4576]: I1203 10:05:39.688137 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 10:05:39 crc kubenswrapper[4576]: I1203 10:05:39.688617 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 10:05:39 crc kubenswrapper[4576]: I1203 10:05:39.721765 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-58lk4/crc-debug-22gdb" Dec 03 10:05:39 crc kubenswrapper[4576]: I1203 10:05:39.765035 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zv9m2\" (UniqueName: \"kubernetes.io/projected/6cb11ff9-f058-4d0c-a500-de8b366776ca-kube-api-access-zv9m2\") pod \"6cb11ff9-f058-4d0c-a500-de8b366776ca\" (UID: \"6cb11ff9-f058-4d0c-a500-de8b366776ca\") " Dec 03 10:05:39 crc kubenswrapper[4576]: I1203 10:05:39.765232 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6cb11ff9-f058-4d0c-a500-de8b366776ca-host\") pod \"6cb11ff9-f058-4d0c-a500-de8b366776ca\" (UID: \"6cb11ff9-f058-4d0c-a500-de8b366776ca\") " Dec 03 10:05:39 crc kubenswrapper[4576]: I1203 10:05:39.765374 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6cb11ff9-f058-4d0c-a500-de8b366776ca-host" (OuterVolumeSpecName: "host") pod "6cb11ff9-f058-4d0c-a500-de8b366776ca" (UID: "6cb11ff9-f058-4d0c-a500-de8b366776ca"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 10:05:39 crc kubenswrapper[4576]: I1203 10:05:39.765883 4576 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6cb11ff9-f058-4d0c-a500-de8b366776ca-host\") on node \"crc\" DevicePath \"\"" Dec 03 10:05:39 crc kubenswrapper[4576]: I1203 10:05:39.776672 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-58lk4/crc-debug-22gdb"] Dec 03 10:05:39 crc kubenswrapper[4576]: I1203 10:05:39.777640 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6cb11ff9-f058-4d0c-a500-de8b366776ca-kube-api-access-zv9m2" (OuterVolumeSpecName: "kube-api-access-zv9m2") pod "6cb11ff9-f058-4d0c-a500-de8b366776ca" (UID: "6cb11ff9-f058-4d0c-a500-de8b366776ca"). InnerVolumeSpecName "kube-api-access-zv9m2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 10:05:39 crc kubenswrapper[4576]: I1203 10:05:39.782106 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-58lk4/crc-debug-22gdb"] Dec 03 10:05:39 crc kubenswrapper[4576]: I1203 10:05:39.868014 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zv9m2\" (UniqueName: \"kubernetes.io/projected/6cb11ff9-f058-4d0c-a500-de8b366776ca-kube-api-access-zv9m2\") on node \"crc\" DevicePath \"\"" Dec 03 10:05:40 crc kubenswrapper[4576]: I1203 10:05:40.642453 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ecf4a9db4a2ed3b2cd0b44e0d61fcf14c2cb10d1b1c8a5142f81ff803c7ee906" Dec 03 10:05:40 crc kubenswrapper[4576]: I1203 10:05:40.642472 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-58lk4/crc-debug-22gdb" Dec 03 10:05:40 crc kubenswrapper[4576]: I1203 10:05:40.979318 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-58lk4/crc-debug-mjwwr"] Dec 03 10:05:40 crc kubenswrapper[4576]: E1203 10:05:40.980472 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cb11ff9-f058-4d0c-a500-de8b366776ca" containerName="container-00" Dec 03 10:05:40 crc kubenswrapper[4576]: I1203 10:05:40.980592 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cb11ff9-f058-4d0c-a500-de8b366776ca" containerName="container-00" Dec 03 10:05:40 crc kubenswrapper[4576]: I1203 10:05:40.980909 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="6cb11ff9-f058-4d0c-a500-de8b366776ca" containerName="container-00" Dec 03 10:05:40 crc kubenswrapper[4576]: I1203 10:05:40.981776 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-58lk4/crc-debug-mjwwr" Dec 03 10:05:40 crc kubenswrapper[4576]: I1203 10:05:40.984202 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-58lk4"/"default-dockercfg-52nm9" Dec 03 10:05:41 crc kubenswrapper[4576]: I1203 10:05:41.093090 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9c4629f5-59d0-4b0b-b435-746e49fdc4b0-host\") pod \"crc-debug-mjwwr\" (UID: \"9c4629f5-59d0-4b0b-b435-746e49fdc4b0\") " pod="openshift-must-gather-58lk4/crc-debug-mjwwr" Dec 03 10:05:41 crc kubenswrapper[4576]: I1203 10:05:41.093385 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5bptn\" (UniqueName: \"kubernetes.io/projected/9c4629f5-59d0-4b0b-b435-746e49fdc4b0-kube-api-access-5bptn\") pod \"crc-debug-mjwwr\" (UID: \"9c4629f5-59d0-4b0b-b435-746e49fdc4b0\") " pod="openshift-must-gather-58lk4/crc-debug-mjwwr" Dec 03 10:05:41 crc kubenswrapper[4576]: I1203 10:05:41.196271 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5bptn\" (UniqueName: \"kubernetes.io/projected/9c4629f5-59d0-4b0b-b435-746e49fdc4b0-kube-api-access-5bptn\") pod \"crc-debug-mjwwr\" (UID: \"9c4629f5-59d0-4b0b-b435-746e49fdc4b0\") " pod="openshift-must-gather-58lk4/crc-debug-mjwwr" Dec 03 10:05:41 crc kubenswrapper[4576]: I1203 10:05:41.196509 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9c4629f5-59d0-4b0b-b435-746e49fdc4b0-host\") pod \"crc-debug-mjwwr\" (UID: \"9c4629f5-59d0-4b0b-b435-746e49fdc4b0\") " pod="openshift-must-gather-58lk4/crc-debug-mjwwr" Dec 03 10:05:41 crc kubenswrapper[4576]: I1203 10:05:41.196678 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9c4629f5-59d0-4b0b-b435-746e49fdc4b0-host\") pod \"crc-debug-mjwwr\" (UID: \"9c4629f5-59d0-4b0b-b435-746e49fdc4b0\") " pod="openshift-must-gather-58lk4/crc-debug-mjwwr" Dec 03 10:05:41 crc kubenswrapper[4576]: I1203 10:05:41.228405 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5bptn\" (UniqueName: \"kubernetes.io/projected/9c4629f5-59d0-4b0b-b435-746e49fdc4b0-kube-api-access-5bptn\") pod \"crc-debug-mjwwr\" (UID: \"9c4629f5-59d0-4b0b-b435-746e49fdc4b0\") " pod="openshift-must-gather-58lk4/crc-debug-mjwwr" Dec 03 10:05:41 crc kubenswrapper[4576]: I1203 
10:05:41.301330 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-58lk4/crc-debug-mjwwr" Dec 03 10:05:41 crc kubenswrapper[4576]: I1203 10:05:41.652975 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-58lk4/crc-debug-mjwwr" event={"ID":"9c4629f5-59d0-4b0b-b435-746e49fdc4b0","Type":"ContainerStarted","Data":"0dfe98a817e92c9aea9b36e8dc41432a0e4ec7482f118f23e1092790525cd69c"} Dec 03 10:05:41 crc kubenswrapper[4576]: I1203 10:05:41.694450 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6cb11ff9-f058-4d0c-a500-de8b366776ca" path="/var/lib/kubelet/pods/6cb11ff9-f058-4d0c-a500-de8b366776ca/volumes" Dec 03 10:05:42 crc kubenswrapper[4576]: I1203 10:05:42.663240 4576 generic.go:334] "Generic (PLEG): container finished" podID="9c4629f5-59d0-4b0b-b435-746e49fdc4b0" containerID="c6f33df55e1ab6708f81dd627f67e816510503e143ebd6f04015494a73165f68" exitCode=0 Dec 03 10:05:42 crc kubenswrapper[4576]: I1203 10:05:42.663374 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-58lk4/crc-debug-mjwwr" event={"ID":"9c4629f5-59d0-4b0b-b435-746e49fdc4b0","Type":"ContainerDied","Data":"c6f33df55e1ab6708f81dd627f67e816510503e143ebd6f04015494a73165f68"} Dec 03 10:05:44 crc kubenswrapper[4576]: I1203 10:05:44.143812 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-58lk4/crc-debug-mjwwr" Dec 03 10:05:44 crc kubenswrapper[4576]: I1203 10:05:44.169865 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9c4629f5-59d0-4b0b-b435-746e49fdc4b0-host\") pod \"9c4629f5-59d0-4b0b-b435-746e49fdc4b0\" (UID: \"9c4629f5-59d0-4b0b-b435-746e49fdc4b0\") " Dec 03 10:05:44 crc kubenswrapper[4576]: I1203 10:05:44.170162 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5bptn\" (UniqueName: \"kubernetes.io/projected/9c4629f5-59d0-4b0b-b435-746e49fdc4b0-kube-api-access-5bptn\") pod \"9c4629f5-59d0-4b0b-b435-746e49fdc4b0\" (UID: \"9c4629f5-59d0-4b0b-b435-746e49fdc4b0\") " Dec 03 10:05:44 crc kubenswrapper[4576]: I1203 10:05:44.171125 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9c4629f5-59d0-4b0b-b435-746e49fdc4b0-host" (OuterVolumeSpecName: "host") pod "9c4629f5-59d0-4b0b-b435-746e49fdc4b0" (UID: "9c4629f5-59d0-4b0b-b435-746e49fdc4b0"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 10:05:44 crc kubenswrapper[4576]: I1203 10:05:44.181962 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c4629f5-59d0-4b0b-b435-746e49fdc4b0-kube-api-access-5bptn" (OuterVolumeSpecName: "kube-api-access-5bptn") pod "9c4629f5-59d0-4b0b-b435-746e49fdc4b0" (UID: "9c4629f5-59d0-4b0b-b435-746e49fdc4b0"). InnerVolumeSpecName "kube-api-access-5bptn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 10:05:44 crc kubenswrapper[4576]: I1203 10:05:44.272735 4576 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9c4629f5-59d0-4b0b-b435-746e49fdc4b0-host\") on node \"crc\" DevicePath \"\"" Dec 03 10:05:44 crc kubenswrapper[4576]: I1203 10:05:44.272769 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5bptn\" (UniqueName: \"kubernetes.io/projected/9c4629f5-59d0-4b0b-b435-746e49fdc4b0-kube-api-access-5bptn\") on node \"crc\" DevicePath \"\"" Dec 03 10:05:44 crc kubenswrapper[4576]: I1203 10:05:44.684359 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-58lk4/crc-debug-mjwwr" event={"ID":"9c4629f5-59d0-4b0b-b435-746e49fdc4b0","Type":"ContainerDied","Data":"0dfe98a817e92c9aea9b36e8dc41432a0e4ec7482f118f23e1092790525cd69c"} Dec 03 10:05:44 crc kubenswrapper[4576]: I1203 10:05:44.684402 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0dfe98a817e92c9aea9b36e8dc41432a0e4ec7482f118f23e1092790525cd69c" Dec 03 10:05:44 crc kubenswrapper[4576]: I1203 10:05:44.689780 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-58lk4/crc-debug-mjwwr" Dec 03 10:05:44 crc kubenswrapper[4576]: I1203 10:05:44.988063 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-58lk4/crc-debug-mjwwr"] Dec 03 10:05:44 crc kubenswrapper[4576]: I1203 10:05:44.998341 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-58lk4/crc-debug-mjwwr"] Dec 03 10:05:45 crc kubenswrapper[4576]: I1203 10:05:45.687992 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c4629f5-59d0-4b0b-b435-746e49fdc4b0" path="/var/lib/kubelet/pods/9c4629f5-59d0-4b0b-b435-746e49fdc4b0/volumes" Dec 03 10:05:46 crc kubenswrapper[4576]: I1203 10:05:46.207142 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-58lk4/crc-debug-kvwbm"] Dec 03 10:05:46 crc kubenswrapper[4576]: E1203 10:05:46.207513 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c4629f5-59d0-4b0b-b435-746e49fdc4b0" containerName="container-00" Dec 03 10:05:46 crc kubenswrapper[4576]: I1203 10:05:46.207542 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c4629f5-59d0-4b0b-b435-746e49fdc4b0" containerName="container-00" Dec 03 10:05:46 crc kubenswrapper[4576]: I1203 10:05:46.207757 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c4629f5-59d0-4b0b-b435-746e49fdc4b0" containerName="container-00" Dec 03 10:05:46 crc kubenswrapper[4576]: I1203 10:05:46.208339 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-58lk4/crc-debug-kvwbm" Dec 03 10:05:46 crc kubenswrapper[4576]: I1203 10:05:46.211684 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-58lk4"/"default-dockercfg-52nm9" Dec 03 10:05:46 crc kubenswrapper[4576]: I1203 10:05:46.314148 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ab55536d-9146-4d9b-b68f-c2ee96c39fd1-host\") pod \"crc-debug-kvwbm\" (UID: \"ab55536d-9146-4d9b-b68f-c2ee96c39fd1\") " pod="openshift-must-gather-58lk4/crc-debug-kvwbm" Dec 03 10:05:46 crc kubenswrapper[4576]: I1203 10:05:46.314475 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c8gkh\" (UniqueName: \"kubernetes.io/projected/ab55536d-9146-4d9b-b68f-c2ee96c39fd1-kube-api-access-c8gkh\") pod \"crc-debug-kvwbm\" (UID: \"ab55536d-9146-4d9b-b68f-c2ee96c39fd1\") " pod="openshift-must-gather-58lk4/crc-debug-kvwbm" Dec 03 10:05:46 crc kubenswrapper[4576]: I1203 10:05:46.415757 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ab55536d-9146-4d9b-b68f-c2ee96c39fd1-host\") pod \"crc-debug-kvwbm\" (UID: \"ab55536d-9146-4d9b-b68f-c2ee96c39fd1\") " pod="openshift-must-gather-58lk4/crc-debug-kvwbm" Dec 03 10:05:46 crc kubenswrapper[4576]: I1203 10:05:46.415842 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c8gkh\" (UniqueName: \"kubernetes.io/projected/ab55536d-9146-4d9b-b68f-c2ee96c39fd1-kube-api-access-c8gkh\") pod \"crc-debug-kvwbm\" (UID: \"ab55536d-9146-4d9b-b68f-c2ee96c39fd1\") " pod="openshift-must-gather-58lk4/crc-debug-kvwbm" Dec 03 10:05:46 crc kubenswrapper[4576]: I1203 10:05:46.415890 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ab55536d-9146-4d9b-b68f-c2ee96c39fd1-host\") pod \"crc-debug-kvwbm\" (UID: \"ab55536d-9146-4d9b-b68f-c2ee96c39fd1\") " pod="openshift-must-gather-58lk4/crc-debug-kvwbm" Dec 03 10:05:46 crc kubenswrapper[4576]: I1203 10:05:46.444049 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c8gkh\" (UniqueName: \"kubernetes.io/projected/ab55536d-9146-4d9b-b68f-c2ee96c39fd1-kube-api-access-c8gkh\") pod \"crc-debug-kvwbm\" (UID: \"ab55536d-9146-4d9b-b68f-c2ee96c39fd1\") " pod="openshift-must-gather-58lk4/crc-debug-kvwbm" Dec 03 10:05:46 crc kubenswrapper[4576]: I1203 10:05:46.526971 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-58lk4/crc-debug-kvwbm" Dec 03 10:05:46 crc kubenswrapper[4576]: W1203 10:05:46.552160 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podab55536d_9146_4d9b_b68f_c2ee96c39fd1.slice/crio-e49d286f9afa0939d47d9f74967f40c65dd253cf859f129390ed7b16adbfbaec WatchSource:0}: Error finding container e49d286f9afa0939d47d9f74967f40c65dd253cf859f129390ed7b16adbfbaec: Status 404 returned error can't find the container with id e49d286f9afa0939d47d9f74967f40c65dd253cf859f129390ed7b16adbfbaec Dec 03 10:05:46 crc kubenswrapper[4576]: I1203 10:05:46.709348 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-58lk4/crc-debug-kvwbm" event={"ID":"ab55536d-9146-4d9b-b68f-c2ee96c39fd1","Type":"ContainerStarted","Data":"e49d286f9afa0939d47d9f74967f40c65dd253cf859f129390ed7b16adbfbaec"} Dec 03 10:05:47 crc kubenswrapper[4576]: I1203 10:05:47.719515 4576 generic.go:334] "Generic (PLEG): container finished" podID="ab55536d-9146-4d9b-b68f-c2ee96c39fd1" containerID="48a988db535c2cc5e7fe2cb23dd51bdc0ff7fbdc011f7d592ecb314a7cebcfb0" exitCode=0 Dec 03 10:05:47 crc kubenswrapper[4576]: I1203 10:05:47.719572 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-58lk4/crc-debug-kvwbm" event={"ID":"ab55536d-9146-4d9b-b68f-c2ee96c39fd1","Type":"ContainerDied","Data":"48a988db535c2cc5e7fe2cb23dd51bdc0ff7fbdc011f7d592ecb314a7cebcfb0"} Dec 03 10:05:47 crc kubenswrapper[4576]: I1203 10:05:47.759432 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-58lk4/crc-debug-kvwbm"] Dec 03 10:05:47 crc kubenswrapper[4576]: I1203 10:05:47.768886 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-58lk4/crc-debug-kvwbm"] Dec 03 10:05:48 crc kubenswrapper[4576]: I1203 10:05:48.835994 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-58lk4/crc-debug-kvwbm" Dec 03 10:05:48 crc kubenswrapper[4576]: I1203 10:05:48.962678 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c8gkh\" (UniqueName: \"kubernetes.io/projected/ab55536d-9146-4d9b-b68f-c2ee96c39fd1-kube-api-access-c8gkh\") pod \"ab55536d-9146-4d9b-b68f-c2ee96c39fd1\" (UID: \"ab55536d-9146-4d9b-b68f-c2ee96c39fd1\") " Dec 03 10:05:48 crc kubenswrapper[4576]: I1203 10:05:48.962890 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ab55536d-9146-4d9b-b68f-c2ee96c39fd1-host\") pod \"ab55536d-9146-4d9b-b68f-c2ee96c39fd1\" (UID: \"ab55536d-9146-4d9b-b68f-c2ee96c39fd1\") " Dec 03 10:05:48 crc kubenswrapper[4576]: I1203 10:05:48.966229 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ab55536d-9146-4d9b-b68f-c2ee96c39fd1-host" (OuterVolumeSpecName: "host") pod "ab55536d-9146-4d9b-b68f-c2ee96c39fd1" (UID: "ab55536d-9146-4d9b-b68f-c2ee96c39fd1"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 10:05:48 crc kubenswrapper[4576]: I1203 10:05:48.990236 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab55536d-9146-4d9b-b68f-c2ee96c39fd1-kube-api-access-c8gkh" (OuterVolumeSpecName: "kube-api-access-c8gkh") pod "ab55536d-9146-4d9b-b68f-c2ee96c39fd1" (UID: "ab55536d-9146-4d9b-b68f-c2ee96c39fd1"). 
InnerVolumeSpecName "kube-api-access-c8gkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 10:05:49 crc kubenswrapper[4576]: I1203 10:05:49.067873 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c8gkh\" (UniqueName: \"kubernetes.io/projected/ab55536d-9146-4d9b-b68f-c2ee96c39fd1-kube-api-access-c8gkh\") on node \"crc\" DevicePath \"\"" Dec 03 10:05:49 crc kubenswrapper[4576]: I1203 10:05:49.067928 4576 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ab55536d-9146-4d9b-b68f-c2ee96c39fd1-host\") on node \"crc\" DevicePath \"\"" Dec 03 10:05:49 crc kubenswrapper[4576]: I1203 10:05:49.687431 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab55536d-9146-4d9b-b68f-c2ee96c39fd1" path="/var/lib/kubelet/pods/ab55536d-9146-4d9b-b68f-c2ee96c39fd1/volumes" Dec 03 10:05:49 crc kubenswrapper[4576]: I1203 10:05:49.744752 4576 scope.go:117] "RemoveContainer" containerID="48a988db535c2cc5e7fe2cb23dd51bdc0ff7fbdc011f7d592ecb314a7cebcfb0" Dec 03 10:05:49 crc kubenswrapper[4576]: I1203 10:05:49.744784 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-58lk4/crc-debug-kvwbm" Dec 03 10:06:09 crc kubenswrapper[4576]: I1203 10:06:09.680937 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 10:06:09 crc kubenswrapper[4576]: I1203 10:06:09.681465 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 10:06:09 crc kubenswrapper[4576]: I1203 10:06:09.688150 4576 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" Dec 03 10:06:09 crc kubenswrapper[4576]: I1203 10:06:09.688994 4576 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1eb3d8c30b6286eadddb1522aa392505713e173cbe42e5fd9209c65960d0c2af"} pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 10:06:09 crc kubenswrapper[4576]: I1203 10:06:09.689060 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" containerID="cri-o://1eb3d8c30b6286eadddb1522aa392505713e173cbe42e5fd9209c65960d0c2af" gracePeriod=600 Dec 03 10:06:09 crc kubenswrapper[4576]: E1203 10:06:09.822161 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:06:09 crc 
kubenswrapper[4576]: I1203 10:06:09.961076 4576 generic.go:334] "Generic (PLEG): container finished" podID="60b1bede-26e9-4b5d-b450-9866da685693" containerID="1eb3d8c30b6286eadddb1522aa392505713e173cbe42e5fd9209c65960d0c2af" exitCode=0 Dec 03 10:06:09 crc kubenswrapper[4576]: I1203 10:06:09.961127 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" event={"ID":"60b1bede-26e9-4b5d-b450-9866da685693","Type":"ContainerDied","Data":"1eb3d8c30b6286eadddb1522aa392505713e173cbe42e5fd9209c65960d0c2af"} Dec 03 10:06:09 crc kubenswrapper[4576]: I1203 10:06:09.961167 4576 scope.go:117] "RemoveContainer" containerID="a6eb737ac8500af571785c87c9921c0edf7cefbe10bc5543eca2bb51c65c9068" Dec 03 10:06:09 crc kubenswrapper[4576]: I1203 10:06:09.961928 4576 scope.go:117] "RemoveContainer" containerID="1eb3d8c30b6286eadddb1522aa392505713e173cbe42e5fd9209c65960d0c2af" Dec 03 10:06:09 crc kubenswrapper[4576]: E1203 10:06:09.962232 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:06:11 crc kubenswrapper[4576]: I1203 10:06:11.495165 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-679c878b58-l5t4t_c3348234-cbbe-464e-b7dd-493151ce96ef/barbican-api-log/0.log" Dec 03 10:06:11 crc kubenswrapper[4576]: I1203 10:06:11.508498 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-679c878b58-l5t4t_c3348234-cbbe-464e-b7dd-493151ce96ef/barbican-api/0.log" Dec 03 10:06:11 crc kubenswrapper[4576]: I1203 10:06:11.731709 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-9f4d5dcf8-5lvz8_14ee4879-18ef-4d7f-956f-03297ff160fe/barbican-keystone-listener/0.log" Dec 03 10:06:11 crc kubenswrapper[4576]: I1203 10:06:11.878618 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-9f4d5dcf8-5lvz8_14ee4879-18ef-4d7f-956f-03297ff160fe/barbican-keystone-listener-log/0.log" Dec 03 10:06:11 crc kubenswrapper[4576]: I1203 10:06:11.932699 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-7678fb6bf-m8m4k_e8e42590-8fdb-4c05-a5e1-e2cddbeb0731/barbican-worker/0.log" Dec 03 10:06:11 crc kubenswrapper[4576]: I1203 10:06:11.960879 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-7678fb6bf-m8m4k_e8e42590-8fdb-4c05-a5e1-e2cddbeb0731/barbican-worker-log/0.log" Dec 03 10:06:12 crc kubenswrapper[4576]: I1203 10:06:12.186304 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-pd2lv_a1b39b7c-1acb-467a-904c-7ee77350804b/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 10:06:12 crc kubenswrapper[4576]: I1203 10:06:12.299039 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_cba79dc3-6e8e-465e-a3f9-9e2fd67972af/ceilometer-central-agent/0.log" Dec 03 10:06:12 crc kubenswrapper[4576]: I1203 10:06:12.450171 4576 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ceilometer-0_cba79dc3-6e8e-465e-a3f9-9e2fd67972af/ceilometer-notification-agent/0.log" Dec 03 10:06:12 crc kubenswrapper[4576]: I1203 10:06:12.504656 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_cba79dc3-6e8e-465e-a3f9-9e2fd67972af/proxy-httpd/0.log" Dec 03 10:06:12 crc kubenswrapper[4576]: I1203 10:06:12.517490 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_cba79dc3-6e8e-465e-a3f9-9e2fd67972af/sg-core/0.log" Dec 03 10:06:12 crc kubenswrapper[4576]: I1203 10:06:12.713443 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_9ee570a8-cd83-4db6-bffa-080a2dae8552/cinder-api/0.log" Dec 03 10:06:13 crc kubenswrapper[4576]: I1203 10:06:13.399789 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_9ee570a8-cd83-4db6-bffa-080a2dae8552/cinder-api-log/0.log" Dec 03 10:06:13 crc kubenswrapper[4576]: I1203 10:06:13.564085 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_11c5d059-87c8-4fe8-ad1b-e50f1b029e8b/cinder-scheduler/0.log" Dec 03 10:06:13 crc kubenswrapper[4576]: I1203 10:06:13.736562 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_11c5d059-87c8-4fe8-ad1b-e50f1b029e8b/probe/0.log" Dec 03 10:06:13 crc kubenswrapper[4576]: I1203 10:06:13.943373 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-sdlvm_308d4747-e427-4848-8961-a21d39dbd449/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 10:06:14 crc kubenswrapper[4576]: I1203 10:06:14.031138 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-q8xlq_be59e37f-89da-4b5c-9126-9fd6fe4d9ec8/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 10:06:14 crc kubenswrapper[4576]: I1203 10:06:14.166007 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-67cb876dc9-z42kz_9e70ca50-713f-40fd-ac9a-89af89af62ba/init/0.log" Dec 03 10:06:14 crc kubenswrapper[4576]: I1203 10:06:14.464777 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-67cb876dc9-z42kz_9e70ca50-713f-40fd-ac9a-89af89af62ba/init/0.log" Dec 03 10:06:14 crc kubenswrapper[4576]: I1203 10:06:14.474949 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-67cb876dc9-z42kz_9e70ca50-713f-40fd-ac9a-89af89af62ba/dnsmasq-dns/0.log" Dec 03 10:06:14 crc kubenswrapper[4576]: I1203 10:06:14.540937 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-hnxl7_fc6a72d3-31b7-461d-82f3-09536b77f9e6/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 10:06:14 crc kubenswrapper[4576]: I1203 10:06:14.780650 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_ebb315c8-6124-44ed-8bb7-f82b228893e2/glance-httpd/0.log" Dec 03 10:06:14 crc kubenswrapper[4576]: I1203 10:06:14.884696 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_ebb315c8-6124-44ed-8bb7-f82b228893e2/glance-log/0.log" Dec 03 10:06:15 crc kubenswrapper[4576]: I1203 10:06:15.012772 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_ad4e4d0b-1390-463b-9337-3b3d8f6ca758/glance-httpd/0.log" 
Dec 03 10:06:15 crc kubenswrapper[4576]: I1203 10:06:15.057305 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_ad4e4d0b-1390-463b-9337-3b3d8f6ca758/glance-log/0.log" Dec 03 10:06:15 crc kubenswrapper[4576]: I1203 10:06:15.307928 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-6d649695d8-6rtxn_288ed488-5270-4966-b866-f9f015262989/horizon/1.log" Dec 03 10:06:15 crc kubenswrapper[4576]: I1203 10:06:15.396335 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-6d649695d8-6rtxn_288ed488-5270-4966-b866-f9f015262989/horizon/0.log" Dec 03 10:06:15 crc kubenswrapper[4576]: I1203 10:06:15.726645 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-j6249_55ad3ecf-5525-4292-b4e9-98456a2dc903/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 10:06:15 crc kubenswrapper[4576]: I1203 10:06:15.764679 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-6d649695d8-6rtxn_288ed488-5270-4966-b866-f9f015262989/horizon-log/0.log" Dec 03 10:06:15 crc kubenswrapper[4576]: I1203 10:06:15.801086 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-2vcbc_71120894-497f-438e-a42f-f3e6fd50d2de/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 10:06:16 crc kubenswrapper[4576]: I1203 10:06:16.001063 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29412601-4r4n5_ccd3075b-2364-4539-b6c1-046722becd51/keystone-cron/0.log" Dec 03 10:06:16 crc kubenswrapper[4576]: I1203 10:06:16.282316 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_6b92a9ba-d2ef-48e7-8efe-a8cc1c31b6ba/kube-state-metrics/0.log" Dec 03 10:06:16 crc kubenswrapper[4576]: I1203 10:06:16.500056 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9_af426bee-00a4-4c61-be68-87719bd4f285/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 10:06:16 crc kubenswrapper[4576]: I1203 10:06:16.571637 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-69657bfb7-ncr7l_ffc7abfc-aecf-42de-8947-143cd7bda142/keystone-api/0.log" Dec 03 10:06:17 crc kubenswrapper[4576]: I1203 10:06:17.016241 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5_94db1892-a7dd-4a07-b181-fa2fbcffe2fc/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 10:06:17 crc kubenswrapper[4576]: I1203 10:06:17.291360 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6f978d8b99-7d88p_cdfca795-30e9-4534-9084-e34e01ab71ae/neutron-httpd/0.log" Dec 03 10:06:17 crc kubenswrapper[4576]: I1203 10:06:17.644983 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6f978d8b99-7d88p_cdfca795-30e9-4534-9084-e34e01ab71ae/neutron-api/0.log" Dec 03 10:06:18 crc kubenswrapper[4576]: I1203 10:06:18.219045 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_2ce21985-e2a9-48a8-bdca-ad4dc248ff98/nova-cell0-conductor-conductor/0.log" Dec 03 10:06:18 crc kubenswrapper[4576]: I1203 10:06:18.459762 4576 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_nova-cell1-conductor-0_2be87e85-f004-483d-9faf-4f1dd3a050bd/nova-cell1-conductor-conductor/0.log" Dec 03 10:06:18 crc kubenswrapper[4576]: I1203 10:06:18.644167 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_b02586c5-f716-48ea-bc4a-fefa33df684e/nova-api-log/0.log" Dec 03 10:06:18 crc kubenswrapper[4576]: I1203 10:06:18.835207 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_69cd46ae-0738-4d84-87a3-077751519dc4/nova-cell1-novncproxy-novncproxy/0.log" Dec 03 10:06:18 crc kubenswrapper[4576]: I1203 10:06:18.941748 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_b02586c5-f716-48ea-bc4a-fefa33df684e/nova-api-api/0.log" Dec 03 10:06:19 crc kubenswrapper[4576]: I1203 10:06:19.319793 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-649lx_c7424b9a-5544-49a0-af69-fc3d308bf468/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 10:06:19 crc kubenswrapper[4576]: I1203 10:06:19.411474 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_8dddc23a-4179-44b0-b145-a91ab3441703/nova-metadata-log/0.log" Dec 03 10:06:19 crc kubenswrapper[4576]: I1203 10:06:19.937960 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_83513275-c7cc-450e-9bca-79ca295b7906/mysql-bootstrap/0.log" Dec 03 10:06:20 crc kubenswrapper[4576]: I1203 10:06:20.115402 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_83513275-c7cc-450e-9bca-79ca295b7906/mysql-bootstrap/0.log" Dec 03 10:06:20 crc kubenswrapper[4576]: I1203 10:06:20.213709 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_83513275-c7cc-450e-9bca-79ca295b7906/galera/0.log" Dec 03 10:06:20 crc kubenswrapper[4576]: I1203 10:06:20.254391 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_60431290-c470-429d-938a-588668bb2887/nova-scheduler-scheduler/0.log" Dec 03 10:06:20 crc kubenswrapper[4576]: I1203 10:06:20.467521 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_288e65b5-6608-4063-9996-eb5180ffbf0e/mysql-bootstrap/0.log" Dec 03 10:06:20 crc kubenswrapper[4576]: I1203 10:06:20.912977 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_288e65b5-6608-4063-9996-eb5180ffbf0e/mysql-bootstrap/0.log" Dec 03 10:06:20 crc kubenswrapper[4576]: I1203 10:06:20.966650 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_288e65b5-6608-4063-9996-eb5180ffbf0e/galera/0.log" Dec 03 10:06:21 crc kubenswrapper[4576]: I1203 10:06:21.119237 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_8dddc23a-4179-44b0-b145-a91ab3441703/nova-metadata-metadata/0.log" Dec 03 10:06:21 crc kubenswrapper[4576]: I1203 10:06:21.132788 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_649a142a-4649-45fb-bdba-11fcc838bf97/openstackclient/0.log" Dec 03 10:06:21 crc kubenswrapper[4576]: I1203 10:06:21.295299 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-25phb_1f709485-8dc6-4e99-ba88-880d491fca2e/ovn-controller/0.log" Dec 03 10:06:21 crc kubenswrapper[4576]: I1203 10:06:21.437728 4576 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovn-controller-metrics-lxvbj_ac988c47-bfaa-4142-a15b-6c69acd494df/openstack-network-exporter/0.log" Dec 03 10:06:21 crc kubenswrapper[4576]: I1203 10:06:21.656024 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-pjp95_310fba87-b39f-4613-a373-54ecd21ed629/ovsdb-server-init/0.log" Dec 03 10:06:22 crc kubenswrapper[4576]: I1203 10:06:22.075775 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-pjp95_310fba87-b39f-4613-a373-54ecd21ed629/ovsdb-server-init/0.log" Dec 03 10:06:22 crc kubenswrapper[4576]: I1203 10:06:22.098370 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-pjp95_310fba87-b39f-4613-a373-54ecd21ed629/ovsdb-server/0.log" Dec 03 10:06:22 crc kubenswrapper[4576]: I1203 10:06:22.112366 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-pjp95_310fba87-b39f-4613-a373-54ecd21ed629/ovs-vswitchd/0.log" Dec 03 10:06:22 crc kubenswrapper[4576]: I1203 10:06:22.631445 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-6n2r7_d7aa650b-ed26-494b-bc5f-95320ad9be67/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 10:06:22 crc kubenswrapper[4576]: I1203 10:06:22.649990 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_636d191f-b7e2-4200-8dc3-5b0f386e2499/openstack-network-exporter/0.log" Dec 03 10:06:22 crc kubenswrapper[4576]: I1203 10:06:22.694993 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_636d191f-b7e2-4200-8dc3-5b0f386e2499/ovn-northd/0.log" Dec 03 10:06:22 crc kubenswrapper[4576]: I1203 10:06:22.990642 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_64d45188-c499-4626-bdd3-6f54a0ed3f14/ovsdbserver-nb/0.log" Dec 03 10:06:23 crc kubenswrapper[4576]: I1203 10:06:23.031460 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_64d45188-c499-4626-bdd3-6f54a0ed3f14/openstack-network-exporter/0.log" Dec 03 10:06:23 crc kubenswrapper[4576]: I1203 10:06:23.209896 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_3a2bee80-63d4-41ae-97fc-54a96c4afc6e/openstack-network-exporter/0.log" Dec 03 10:06:23 crc kubenswrapper[4576]: I1203 10:06:23.235067 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_3a2bee80-63d4-41ae-97fc-54a96c4afc6e/ovsdbserver-sb/0.log" Dec 03 10:06:23 crc kubenswrapper[4576]: I1203 10:06:23.478693 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-5f666f544-xj7sz_a1aaa45e-8e67-4360-b4db-2d2866d00865/placement-api/0.log" Dec 03 10:06:23 crc kubenswrapper[4576]: I1203 10:06:23.566587 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_d0c9f8c4-e8c5-4033-ac50-305178e9010f/setup-container/0.log" Dec 03 10:06:23 crc kubenswrapper[4576]: I1203 10:06:23.652083 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-5f666f544-xj7sz_a1aaa45e-8e67-4360-b4db-2d2866d00865/placement-log/0.log" Dec 03 10:06:23 crc kubenswrapper[4576]: I1203 10:06:23.929834 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_d0c9f8c4-e8c5-4033-ac50-305178e9010f/rabbitmq/0.log" Dec 03 10:06:24 crc kubenswrapper[4576]: I1203 10:06:24.004314 4576 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_d0c9f8c4-e8c5-4033-ac50-305178e9010f/setup-container/0.log" Dec 03 10:06:24 crc kubenswrapper[4576]: I1203 10:06:24.100722 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9/setup-container/0.log" Dec 03 10:06:24 crc kubenswrapper[4576]: I1203 10:06:24.266064 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9/rabbitmq/0.log" Dec 03 10:06:24 crc kubenswrapper[4576]: I1203 10:06:24.314048 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9/setup-container/0.log" Dec 03 10:06:24 crc kubenswrapper[4576]: I1203 10:06:24.493708 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-2c4mf_96dfc3f2-bb7c-407d-8714-98a679e6d78e/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 10:06:24 crc kubenswrapper[4576]: I1203 10:06:24.600959 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-82clv_3fc3717e-aa96-497b-8d90-3c247a234d88/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 10:06:24 crc kubenswrapper[4576]: I1203 10:06:24.677380 4576 scope.go:117] "RemoveContainer" containerID="1eb3d8c30b6286eadddb1522aa392505713e173cbe42e5fd9209c65960d0c2af" Dec 03 10:06:24 crc kubenswrapper[4576]: E1203 10:06:24.680516 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:06:24 crc kubenswrapper[4576]: I1203 10:06:24.812362 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-86chp_fd4aa481-7064-4ebc-bc06-d706d427260d/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 10:06:24 crc kubenswrapper[4576]: I1203 10:06:24.892340 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-h2l89_9afda75e-55d1-4823-a4be-3c79bf36b3b2/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 10:06:25 crc kubenswrapper[4576]: I1203 10:06:25.151748 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-k8srx_2c30d1eb-aa4b-44e6-b424-dcdd12b23090/ssh-known-hosts-edpm-deployment/0.log" Dec 03 10:06:25 crc kubenswrapper[4576]: I1203 10:06:25.388632 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-b677c5dc5-pfc4n_6bf8d1cf-0003-4e48-89f5-7ae1698f27ff/proxy-server/0.log" Dec 03 10:06:25 crc kubenswrapper[4576]: I1203 10:06:25.524421 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-649tl_801bf024-9418-42e1-893f-0a4b82d411b4/swift-ring-rebalance/0.log" Dec 03 10:06:25 crc kubenswrapper[4576]: I1203 10:06:25.583557 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-b677c5dc5-pfc4n_6bf8d1cf-0003-4e48-89f5-7ae1698f27ff/proxy-httpd/0.log" Dec 03 10:06:25 crc kubenswrapper[4576]: I1203 10:06:25.668143 4576 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8e0694dc-a49e-4136-a206-3bb5c8acd48c/account-auditor/0.log" Dec 03 10:06:25 crc kubenswrapper[4576]: I1203 10:06:25.851694 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8e0694dc-a49e-4136-a206-3bb5c8acd48c/account-reaper/0.log" Dec 03 10:06:25 crc kubenswrapper[4576]: I1203 10:06:25.855613 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8e0694dc-a49e-4136-a206-3bb5c8acd48c/account-replicator/0.log" Dec 03 10:06:25 crc kubenswrapper[4576]: I1203 10:06:25.943638 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8e0694dc-a49e-4136-a206-3bb5c8acd48c/container-auditor/0.log" Dec 03 10:06:25 crc kubenswrapper[4576]: I1203 10:06:25.952279 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8e0694dc-a49e-4136-a206-3bb5c8acd48c/account-server/0.log" Dec 03 10:06:26 crc kubenswrapper[4576]: I1203 10:06:26.216780 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8e0694dc-a49e-4136-a206-3bb5c8acd48c/container-updater/0.log" Dec 03 10:06:26 crc kubenswrapper[4576]: I1203 10:06:26.227126 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8e0694dc-a49e-4136-a206-3bb5c8acd48c/container-server/0.log" Dec 03 10:06:26 crc kubenswrapper[4576]: I1203 10:06:26.253093 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8e0694dc-a49e-4136-a206-3bb5c8acd48c/container-replicator/0.log" Dec 03 10:06:26 crc kubenswrapper[4576]: I1203 10:06:26.274757 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8e0694dc-a49e-4136-a206-3bb5c8acd48c/object-auditor/0.log" Dec 03 10:06:26 crc kubenswrapper[4576]: I1203 10:06:26.521099 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8e0694dc-a49e-4136-a206-3bb5c8acd48c/object-replicator/0.log" Dec 03 10:06:26 crc kubenswrapper[4576]: I1203 10:06:26.566383 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8e0694dc-a49e-4136-a206-3bb5c8acd48c/object-expirer/0.log" Dec 03 10:06:26 crc kubenswrapper[4576]: I1203 10:06:26.637261 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8e0694dc-a49e-4136-a206-3bb5c8acd48c/object-server/0.log" Dec 03 10:06:26 crc kubenswrapper[4576]: I1203 10:06:26.644364 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8e0694dc-a49e-4136-a206-3bb5c8acd48c/object-updater/0.log" Dec 03 10:06:26 crc kubenswrapper[4576]: I1203 10:06:26.808217 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8e0694dc-a49e-4136-a206-3bb5c8acd48c/rsync/0.log" Dec 03 10:06:26 crc kubenswrapper[4576]: I1203 10:06:26.811609 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8e0694dc-a49e-4136-a206-3bb5c8acd48c/swift-recon-cron/0.log" Dec 03 10:06:26 crc kubenswrapper[4576]: I1203 10:06:26.940408 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm_f432497e-88f4-424f-beb0-856c58fb586d/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 10:06:27 crc kubenswrapper[4576]: I1203 10:06:27.050536 4576 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_tempest-tests-tempest_34096dc4-8175-4637-916a-9e52376b8c08/tempest-tests-tempest-tests-runner/0.log" Dec 03 10:06:27 crc kubenswrapper[4576]: I1203 10:06:27.271175 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_d9e81811-e29e-4a52-a3d6-3ed997b86415/test-operator-logs-container/0.log" Dec 03 10:06:27 crc kubenswrapper[4576]: I1203 10:06:27.480685 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-lbmmg_da77edd1-65a1-4f59-a4d3-e57679ae6acf/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 10:06:39 crc kubenswrapper[4576]: I1203 10:06:39.676991 4576 scope.go:117] "RemoveContainer" containerID="1eb3d8c30b6286eadddb1522aa392505713e173cbe42e5fd9209c65960d0c2af" Dec 03 10:06:39 crc kubenswrapper[4576]: E1203 10:06:39.677891 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:06:39 crc kubenswrapper[4576]: I1203 10:06:39.818309 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_c7c148cb-508f-45ed-a5ea-06b0b4bc51ff/memcached/0.log" Dec 03 10:06:52 crc kubenswrapper[4576]: I1203 10:06:52.677821 4576 scope.go:117] "RemoveContainer" containerID="1eb3d8c30b6286eadddb1522aa392505713e173cbe42e5fd9209c65960d0c2af" Dec 03 10:06:52 crc kubenswrapper[4576]: E1203 10:06:52.680153 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:06:58 crc kubenswrapper[4576]: I1203 10:06:58.296470 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt_e86979bd-954c-45c5-940d-f8e334277a44/util/0.log" Dec 03 10:06:58 crc kubenswrapper[4576]: I1203 10:06:58.470774 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt_e86979bd-954c-45c5-940d-f8e334277a44/pull/0.log" Dec 03 10:06:58 crc kubenswrapper[4576]: I1203 10:06:58.495227 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt_e86979bd-954c-45c5-940d-f8e334277a44/util/0.log" Dec 03 10:06:58 crc kubenswrapper[4576]: I1203 10:06:58.502248 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt_e86979bd-954c-45c5-940d-f8e334277a44/pull/0.log" Dec 03 10:06:58 crc kubenswrapper[4576]: I1203 10:06:58.680686 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt_e86979bd-954c-45c5-940d-f8e334277a44/pull/0.log" Dec 03 10:06:58 crc 
kubenswrapper[4576]: I1203 10:06:58.697315 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt_e86979bd-954c-45c5-940d-f8e334277a44/extract/0.log" Dec 03 10:06:58 crc kubenswrapper[4576]: I1203 10:06:58.763662 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt_e86979bd-954c-45c5-940d-f8e334277a44/util/0.log" Dec 03 10:06:59 crc kubenswrapper[4576]: I1203 10:06:59.179157 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-4mznn_3ea8e44c-f5b3-4b92-94ed-04954472481c/kube-rbac-proxy/0.log" Dec 03 10:06:59 crc kubenswrapper[4576]: I1203 10:06:59.285096 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-4mznn_3ea8e44c-f5b3-4b92-94ed-04954472481c/manager/0.log" Dec 03 10:06:59 crc kubenswrapper[4576]: I1203 10:06:59.300285 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-wqx97_cce29053-f3ed-4dce-a362-c99b4aa31102/kube-rbac-proxy/0.log" Dec 03 10:06:59 crc kubenswrapper[4576]: I1203 10:06:59.428997 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-wqx97_cce29053-f3ed-4dce-a362-c99b4aa31102/manager/0.log" Dec 03 10:06:59 crc kubenswrapper[4576]: I1203 10:06:59.547448 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-n78hw_75840987-c6e3-45e6-912c-85771c498e41/kube-rbac-proxy/0.log" Dec 03 10:06:59 crc kubenswrapper[4576]: I1203 10:06:59.556693 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-n78hw_75840987-c6e3-45e6-912c-85771c498e41/manager/0.log" Dec 03 10:06:59 crc kubenswrapper[4576]: I1203 10:06:59.752494 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-zghdb_df68290e-5853-4fce-903f-354ea9f740e1/kube-rbac-proxy/0.log" Dec 03 10:06:59 crc kubenswrapper[4576]: I1203 10:06:59.827461 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-zghdb_df68290e-5853-4fce-903f-354ea9f740e1/manager/0.log" Dec 03 10:06:59 crc kubenswrapper[4576]: I1203 10:06:59.918623 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-nsggq_649283f6-ebcd-45a0-974f-e9c14138fa46/kube-rbac-proxy/0.log" Dec 03 10:06:59 crc kubenswrapper[4576]: I1203 10:06:59.954874 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-nsggq_649283f6-ebcd-45a0-974f-e9c14138fa46/manager/0.log" Dec 03 10:07:00 crc kubenswrapper[4576]: I1203 10:07:00.081705 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-8g876_41505f0c-de81-41e5-b9e1-de8a17563b8d/kube-rbac-proxy/0.log" Dec 03 10:07:00 crc kubenswrapper[4576]: I1203 10:07:00.145649 4576 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-8g876_41505f0c-de81-41e5-b9e1-de8a17563b8d/manager/0.log" Dec 03 10:07:00 crc kubenswrapper[4576]: I1203 10:07:00.227777 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-4tr5z_f923e423-dcca-499c-8bf1-1c5d4288f20c/kube-rbac-proxy/0.log" Dec 03 10:07:00 crc kubenswrapper[4576]: I1203 10:07:00.431412 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-4tr5z_f923e423-dcca-499c-8bf1-1c5d4288f20c/manager/0.log" Dec 03 10:07:00 crc kubenswrapper[4576]: I1203 10:07:00.489144 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-49hnv_ac2346b5-8522-40bf-8083-15d06d8b9afd/manager/0.log" Dec 03 10:07:00 crc kubenswrapper[4576]: I1203 10:07:00.495030 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-49hnv_ac2346b5-8522-40bf-8083-15d06d8b9afd/kube-rbac-proxy/0.log" Dec 03 10:07:00 crc kubenswrapper[4576]: I1203 10:07:00.657680 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-64r8s_13d78877-8170-498d-bf0c-ab37fb799c83/kube-rbac-proxy/0.log" Dec 03 10:07:00 crc kubenswrapper[4576]: I1203 10:07:00.783218 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-64r8s_13d78877-8170-498d-bf0c-ab37fb799c83/manager/0.log" Dec 03 10:07:00 crc kubenswrapper[4576]: I1203 10:07:00.855438 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-w68kf_7c74d79c-0100-40b9-a363-434b817b0504/kube-rbac-proxy/0.log" Dec 03 10:07:00 crc kubenswrapper[4576]: I1203 10:07:00.874874 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-w68kf_7c74d79c-0100-40b9-a363-434b817b0504/manager/0.log" Dec 03 10:07:00 crc kubenswrapper[4576]: I1203 10:07:00.972560 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-42rgl_a3246ab9-76a7-41dd-9fcd-57323766f4f2/kube-rbac-proxy/0.log" Dec 03 10:07:01 crc kubenswrapper[4576]: I1203 10:07:01.082187 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-42rgl_a3246ab9-76a7-41dd-9fcd-57323766f4f2/manager/0.log" Dec 03 10:07:01 crc kubenswrapper[4576]: I1203 10:07:01.203371 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-jwqvx_7e7e8ece-f6be-41dc-be20-b82b844b4b83/kube-rbac-proxy/0.log" Dec 03 10:07:01 crc kubenswrapper[4576]: I1203 10:07:01.318132 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-7mg4g_e697b8bb-b78f-4b0c-92e6-adde533c75b6/kube-rbac-proxy/0.log" Dec 03 10:07:01 crc kubenswrapper[4576]: I1203 10:07:01.356873 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-jwqvx_7e7e8ece-f6be-41dc-be20-b82b844b4b83/manager/0.log" Dec 03 10:07:01 crc kubenswrapper[4576]: I1203 10:07:01.547832 4576 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-7mg4g_e697b8bb-b78f-4b0c-92e6-adde533c75b6/manager/0.log" Dec 03 10:07:01 crc kubenswrapper[4576]: I1203 10:07:01.584607 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-rflv4_5280c7ee-cf95-4f36-a074-247880784343/kube-rbac-proxy/0.log" Dec 03 10:07:01 crc kubenswrapper[4576]: I1203 10:07:01.628336 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-rflv4_5280c7ee-cf95-4f36-a074-247880784343/manager/0.log" Dec 03 10:07:01 crc kubenswrapper[4576]: I1203 10:07:01.784896 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd42tjbm_bad742c6-6ff9-4fe9-8a09-7d399b6d41de/manager/0.log" Dec 03 10:07:01 crc kubenswrapper[4576]: I1203 10:07:01.866969 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd42tjbm_bad742c6-6ff9-4fe9-8a09-7d399b6d41de/kube-rbac-proxy/0.log" Dec 03 10:07:02 crc kubenswrapper[4576]: I1203 10:07:02.362003 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-6757ffd54f-4wfmb_b30928c7-4b2d-4fec-81f3-9237336e8d81/operator/0.log" Dec 03 10:07:03 crc kubenswrapper[4576]: I1203 10:07:03.043881 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-rwqb7_335bbabf-ec3b-484b-8081-08e919ec8dcd/registry-server/0.log" Dec 03 10:07:03 crc kubenswrapper[4576]: I1203 10:07:03.099904 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-g9t26_c4445b62-9884-4667-96cd-ce531cc798c4/kube-rbac-proxy/0.log" Dec 03 10:07:03 crc kubenswrapper[4576]: I1203 10:07:03.101608 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-g9t26_c4445b62-9884-4667-96cd-ce531cc798c4/manager/0.log" Dec 03 10:07:03 crc kubenswrapper[4576]: I1203 10:07:03.353604 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-tdbjg_8c321c82-4420-4b97-a16b-ce20c7ebcb15/kube-rbac-proxy/0.log" Dec 03 10:07:03 crc kubenswrapper[4576]: I1203 10:07:03.446729 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-tdbjg_8c321c82-4420-4b97-a16b-ce20c7ebcb15/manager/0.log" Dec 03 10:07:03 crc kubenswrapper[4576]: I1203 10:07:03.497624 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-665947b8b5-hr855_446e8b7b-0e54-4b76-b0c7-56ec1f779499/manager/0.log" Dec 03 10:07:03 crc kubenswrapper[4576]: I1203 10:07:03.576552 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-qv7t2_e72f6251-8004-43cc-9bf2-80bc4b8d4431/operator/0.log" Dec 03 10:07:03 crc kubenswrapper[4576]: I1203 10:07:03.650200 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-6mlkp_f6e0d66d-6dc0-461c-a5c5-8a1060b6b164/kube-rbac-proxy/0.log" Dec 03 10:07:03 crc kubenswrapper[4576]: I1203 10:07:03.699377 
4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-6mlkp_f6e0d66d-6dc0-461c-a5c5-8a1060b6b164/manager/0.log" Dec 03 10:07:04 crc kubenswrapper[4576]: I1203 10:07:04.307365 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-vb4nv_7d654424-85f1-4848-93f3-abb64297ce3b/kube-rbac-proxy/0.log" Dec 03 10:07:04 crc kubenswrapper[4576]: I1203 10:07:04.377689 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-vb4nv_7d654424-85f1-4848-93f3-abb64297ce3b/manager/0.log" Dec 03 10:07:04 crc kubenswrapper[4576]: I1203 10:07:04.437505 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-ndz7z_3ce77fe1-0135-4043-9ebd-b7722db624d9/kube-rbac-proxy/0.log" Dec 03 10:07:04 crc kubenswrapper[4576]: I1203 10:07:04.507002 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-ndz7z_3ce77fe1-0135-4043-9ebd-b7722db624d9/manager/0.log" Dec 03 10:07:04 crc kubenswrapper[4576]: I1203 10:07:04.592862 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-4c8wv_8a7accfb-c3a2-4f70-906e-b2a3545eb88a/kube-rbac-proxy/0.log" Dec 03 10:07:04 crc kubenswrapper[4576]: I1203 10:07:04.640690 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-4c8wv_8a7accfb-c3a2-4f70-906e-b2a3545eb88a/manager/0.log" Dec 03 10:07:05 crc kubenswrapper[4576]: I1203 10:07:05.678358 4576 scope.go:117] "RemoveContainer" containerID="1eb3d8c30b6286eadddb1522aa392505713e173cbe42e5fd9209c65960d0c2af" Dec 03 10:07:05 crc kubenswrapper[4576]: E1203 10:07:05.685267 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:07:19 crc kubenswrapper[4576]: I1203 10:07:19.678053 4576 scope.go:117] "RemoveContainer" containerID="1eb3d8c30b6286eadddb1522aa392505713e173cbe42e5fd9209c65960d0c2af" Dec 03 10:07:19 crc kubenswrapper[4576]: E1203 10:07:19.679078 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:07:26 crc kubenswrapper[4576]: I1203 10:07:26.331430 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-kbnz6_19be1a82-6279-466c-a719-e346d59597be/control-plane-machine-set-operator/0.log" Dec 03 10:07:26 crc kubenswrapper[4576]: I1203 10:07:26.484384 4576 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-nsf2s_bedc7336-f16c-485b-8cc8-13eea705d68a/kube-rbac-proxy/0.log" Dec 03 10:07:26 crc kubenswrapper[4576]: I1203 10:07:26.541740 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-nsf2s_bedc7336-f16c-485b-8cc8-13eea705d68a/machine-api-operator/0.log" Dec 03 10:07:32 crc kubenswrapper[4576]: I1203 10:07:32.677573 4576 scope.go:117] "RemoveContainer" containerID="1eb3d8c30b6286eadddb1522aa392505713e173cbe42e5fd9209c65960d0c2af" Dec 03 10:07:32 crc kubenswrapper[4576]: E1203 10:07:32.678472 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:07:42 crc kubenswrapper[4576]: I1203 10:07:42.324298 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-ms2nw_5d5e47f6-494f-4fc7-a4c0-c12410e86da3/cert-manager-controller/0.log" Dec 03 10:07:42 crc kubenswrapper[4576]: I1203 10:07:42.620902 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-l45pw_59825354-6654-4c6a-be27-4d3b6f2a57c2/cert-manager-cainjector/0.log" Dec 03 10:07:42 crc kubenswrapper[4576]: I1203 10:07:42.678194 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-fjnrl_a1cefe8c-df93-4ed2-a334-c60ce9cc918d/cert-manager-webhook/0.log" Dec 03 10:07:42 crc kubenswrapper[4576]: I1203 10:07:42.835668 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-j2j6f"] Dec 03 10:07:42 crc kubenswrapper[4576]: E1203 10:07:42.836038 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab55536d-9146-4d9b-b68f-c2ee96c39fd1" containerName="container-00" Dec 03 10:07:42 crc kubenswrapper[4576]: I1203 10:07:42.836053 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab55536d-9146-4d9b-b68f-c2ee96c39fd1" containerName="container-00" Dec 03 10:07:42 crc kubenswrapper[4576]: I1203 10:07:42.836259 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab55536d-9146-4d9b-b68f-c2ee96c39fd1" containerName="container-00" Dec 03 10:07:42 crc kubenswrapper[4576]: I1203 10:07:42.837626 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j2j6f" Dec 03 10:07:42 crc kubenswrapper[4576]: I1203 10:07:42.854141 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-j2j6f"] Dec 03 10:07:42 crc kubenswrapper[4576]: I1203 10:07:42.885248 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb866353-e7d9-4df0-8b3e-00e48556f7de-catalog-content\") pod \"redhat-marketplace-j2j6f\" (UID: \"cb866353-e7d9-4df0-8b3e-00e48556f7de\") " pod="openshift-marketplace/redhat-marketplace-j2j6f" Dec 03 10:07:42 crc kubenswrapper[4576]: I1203 10:07:42.885353 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb866353-e7d9-4df0-8b3e-00e48556f7de-utilities\") pod \"redhat-marketplace-j2j6f\" (UID: \"cb866353-e7d9-4df0-8b3e-00e48556f7de\") " pod="openshift-marketplace/redhat-marketplace-j2j6f" Dec 03 10:07:42 crc kubenswrapper[4576]: I1203 10:07:42.885430 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rffz2\" (UniqueName: \"kubernetes.io/projected/cb866353-e7d9-4df0-8b3e-00e48556f7de-kube-api-access-rffz2\") pod \"redhat-marketplace-j2j6f\" (UID: \"cb866353-e7d9-4df0-8b3e-00e48556f7de\") " pod="openshift-marketplace/redhat-marketplace-j2j6f" Dec 03 10:07:42 crc kubenswrapper[4576]: I1203 10:07:42.986942 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rffz2\" (UniqueName: \"kubernetes.io/projected/cb866353-e7d9-4df0-8b3e-00e48556f7de-kube-api-access-rffz2\") pod \"redhat-marketplace-j2j6f\" (UID: \"cb866353-e7d9-4df0-8b3e-00e48556f7de\") " pod="openshift-marketplace/redhat-marketplace-j2j6f" Dec 03 10:07:42 crc kubenswrapper[4576]: I1203 10:07:42.987060 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb866353-e7d9-4df0-8b3e-00e48556f7de-catalog-content\") pod \"redhat-marketplace-j2j6f\" (UID: \"cb866353-e7d9-4df0-8b3e-00e48556f7de\") " pod="openshift-marketplace/redhat-marketplace-j2j6f" Dec 03 10:07:42 crc kubenswrapper[4576]: I1203 10:07:42.987141 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb866353-e7d9-4df0-8b3e-00e48556f7de-utilities\") pod \"redhat-marketplace-j2j6f\" (UID: \"cb866353-e7d9-4df0-8b3e-00e48556f7de\") " pod="openshift-marketplace/redhat-marketplace-j2j6f" Dec 03 10:07:42 crc kubenswrapper[4576]: I1203 10:07:42.987719 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb866353-e7d9-4df0-8b3e-00e48556f7de-utilities\") pod \"redhat-marketplace-j2j6f\" (UID: \"cb866353-e7d9-4df0-8b3e-00e48556f7de\") " pod="openshift-marketplace/redhat-marketplace-j2j6f" Dec 03 10:07:42 crc kubenswrapper[4576]: I1203 10:07:42.987826 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb866353-e7d9-4df0-8b3e-00e48556f7de-catalog-content\") pod \"redhat-marketplace-j2j6f\" (UID: \"cb866353-e7d9-4df0-8b3e-00e48556f7de\") " pod="openshift-marketplace/redhat-marketplace-j2j6f" Dec 03 10:07:43 crc kubenswrapper[4576]: I1203 10:07:43.317251 4576 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-rffz2\" (UniqueName: \"kubernetes.io/projected/cb866353-e7d9-4df0-8b3e-00e48556f7de-kube-api-access-rffz2\") pod \"redhat-marketplace-j2j6f\" (UID: \"cb866353-e7d9-4df0-8b3e-00e48556f7de\") " pod="openshift-marketplace/redhat-marketplace-j2j6f" Dec 03 10:07:43 crc kubenswrapper[4576]: I1203 10:07:43.455486 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j2j6f" Dec 03 10:07:44 crc kubenswrapper[4576]: I1203 10:07:44.004507 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-j2j6f"] Dec 03 10:07:44 crc kubenswrapper[4576]: I1203 10:07:44.845900 4576 generic.go:334] "Generic (PLEG): container finished" podID="cb866353-e7d9-4df0-8b3e-00e48556f7de" containerID="b3427a5246ef46c7b673a0144cd4e6683e34da01ed0d0ea11ad8f009fec0d66e" exitCode=0 Dec 03 10:07:44 crc kubenswrapper[4576]: I1203 10:07:44.845965 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j2j6f" event={"ID":"cb866353-e7d9-4df0-8b3e-00e48556f7de","Type":"ContainerDied","Data":"b3427a5246ef46c7b673a0144cd4e6683e34da01ed0d0ea11ad8f009fec0d66e"} Dec 03 10:07:44 crc kubenswrapper[4576]: I1203 10:07:44.846295 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j2j6f" event={"ID":"cb866353-e7d9-4df0-8b3e-00e48556f7de","Type":"ContainerStarted","Data":"3bc496cb2e39eb3c04a012a48f4d20af7eccfd7b9d9fc5b9741fcda7d7b94337"} Dec 03 10:07:46 crc kubenswrapper[4576]: I1203 10:07:46.678061 4576 scope.go:117] "RemoveContainer" containerID="1eb3d8c30b6286eadddb1522aa392505713e173cbe42e5fd9209c65960d0c2af" Dec 03 10:07:46 crc kubenswrapper[4576]: E1203 10:07:46.678870 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:07:46 crc kubenswrapper[4576]: I1203 10:07:46.874945 4576 generic.go:334] "Generic (PLEG): container finished" podID="cb866353-e7d9-4df0-8b3e-00e48556f7de" containerID="6f8fd4b5932074b5c54556ab3fa1918fb8d889c7c50cb3a4501f847d549989c2" exitCode=0 Dec 03 10:07:46 crc kubenswrapper[4576]: I1203 10:07:46.874997 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j2j6f" event={"ID":"cb866353-e7d9-4df0-8b3e-00e48556f7de","Type":"ContainerDied","Data":"6f8fd4b5932074b5c54556ab3fa1918fb8d889c7c50cb3a4501f847d549989c2"} Dec 03 10:07:47 crc kubenswrapper[4576]: I1203 10:07:47.888558 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j2j6f" event={"ID":"cb866353-e7d9-4df0-8b3e-00e48556f7de","Type":"ContainerStarted","Data":"e35e62be8cac2fdecc3ec20f7682d9ffbc53bbc2f7f226780bfcea7f4871dd71"} Dec 03 10:07:47 crc kubenswrapper[4576]: I1203 10:07:47.912391 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-j2j6f" podStartSLOduration=3.500994033 podStartE2EDuration="5.912359857s" podCreationTimestamp="2025-12-03 10:07:42 +0000 UTC" firstStartedPulling="2025-12-03 10:07:44.849109721 +0000 UTC m=+5272.235086715" lastFinishedPulling="2025-12-03 
10:07:47.260475535 +0000 UTC m=+5274.646452539" observedRunningTime="2025-12-03 10:07:47.905344998 +0000 UTC m=+5275.291321982" watchObservedRunningTime="2025-12-03 10:07:47.912359857 +0000 UTC m=+5275.298336841" Dec 03 10:07:53 crc kubenswrapper[4576]: I1203 10:07:53.455726 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-j2j6f" Dec 03 10:07:53 crc kubenswrapper[4576]: I1203 10:07:53.458008 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-j2j6f" Dec 03 10:07:53 crc kubenswrapper[4576]: I1203 10:07:53.750615 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-j2j6f" Dec 03 10:07:54 crc kubenswrapper[4576]: I1203 10:07:54.016038 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-j2j6f" Dec 03 10:07:54 crc kubenswrapper[4576]: I1203 10:07:54.083643 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-j2j6f"] Dec 03 10:07:55 crc kubenswrapper[4576]: I1203 10:07:55.978673 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-j2j6f" podUID="cb866353-e7d9-4df0-8b3e-00e48556f7de" containerName="registry-server" containerID="cri-o://e35e62be8cac2fdecc3ec20f7682d9ffbc53bbc2f7f226780bfcea7f4871dd71" gracePeriod=2 Dec 03 10:07:56 crc kubenswrapper[4576]: I1203 10:07:56.507483 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j2j6f" Dec 03 10:07:56 crc kubenswrapper[4576]: I1203 10:07:56.665755 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rffz2\" (UniqueName: \"kubernetes.io/projected/cb866353-e7d9-4df0-8b3e-00e48556f7de-kube-api-access-rffz2\") pod \"cb866353-e7d9-4df0-8b3e-00e48556f7de\" (UID: \"cb866353-e7d9-4df0-8b3e-00e48556f7de\") " Dec 03 10:07:56 crc kubenswrapper[4576]: I1203 10:07:56.666052 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb866353-e7d9-4df0-8b3e-00e48556f7de-catalog-content\") pod \"cb866353-e7d9-4df0-8b3e-00e48556f7de\" (UID: \"cb866353-e7d9-4df0-8b3e-00e48556f7de\") " Dec 03 10:07:56 crc kubenswrapper[4576]: I1203 10:07:56.666084 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb866353-e7d9-4df0-8b3e-00e48556f7de-utilities\") pod \"cb866353-e7d9-4df0-8b3e-00e48556f7de\" (UID: \"cb866353-e7d9-4df0-8b3e-00e48556f7de\") " Dec 03 10:07:56 crc kubenswrapper[4576]: I1203 10:07:56.667116 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb866353-e7d9-4df0-8b3e-00e48556f7de-utilities" (OuterVolumeSpecName: "utilities") pod "cb866353-e7d9-4df0-8b3e-00e48556f7de" (UID: "cb866353-e7d9-4df0-8b3e-00e48556f7de"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 10:07:56 crc kubenswrapper[4576]: I1203 10:07:56.679295 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb866353-e7d9-4df0-8b3e-00e48556f7de-kube-api-access-rffz2" (OuterVolumeSpecName: "kube-api-access-rffz2") pod "cb866353-e7d9-4df0-8b3e-00e48556f7de" (UID: "cb866353-e7d9-4df0-8b3e-00e48556f7de"). InnerVolumeSpecName "kube-api-access-rffz2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 10:07:56 crc kubenswrapper[4576]: I1203 10:07:56.689229 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb866353-e7d9-4df0-8b3e-00e48556f7de-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cb866353-e7d9-4df0-8b3e-00e48556f7de" (UID: "cb866353-e7d9-4df0-8b3e-00e48556f7de"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 10:07:56 crc kubenswrapper[4576]: I1203 10:07:56.769406 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rffz2\" (UniqueName: \"kubernetes.io/projected/cb866353-e7d9-4df0-8b3e-00e48556f7de-kube-api-access-rffz2\") on node \"crc\" DevicePath \"\"" Dec 03 10:07:56 crc kubenswrapper[4576]: I1203 10:07:56.769679 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb866353-e7d9-4df0-8b3e-00e48556f7de-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 10:07:56 crc kubenswrapper[4576]: I1203 10:07:56.769760 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb866353-e7d9-4df0-8b3e-00e48556f7de-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 10:07:56 crc kubenswrapper[4576]: I1203 10:07:56.989326 4576 generic.go:334] "Generic (PLEG): container finished" podID="cb866353-e7d9-4df0-8b3e-00e48556f7de" containerID="e35e62be8cac2fdecc3ec20f7682d9ffbc53bbc2f7f226780bfcea7f4871dd71" exitCode=0 Dec 03 10:07:56 crc kubenswrapper[4576]: I1203 10:07:56.989370 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j2j6f" event={"ID":"cb866353-e7d9-4df0-8b3e-00e48556f7de","Type":"ContainerDied","Data":"e35e62be8cac2fdecc3ec20f7682d9ffbc53bbc2f7f226780bfcea7f4871dd71"} Dec 03 10:07:56 crc kubenswrapper[4576]: I1203 10:07:56.989430 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j2j6f" event={"ID":"cb866353-e7d9-4df0-8b3e-00e48556f7de","Type":"ContainerDied","Data":"3bc496cb2e39eb3c04a012a48f4d20af7eccfd7b9d9fc5b9741fcda7d7b94337"} Dec 03 10:07:56 crc kubenswrapper[4576]: I1203 10:07:56.989449 4576 scope.go:117] "RemoveContainer" containerID="e35e62be8cac2fdecc3ec20f7682d9ffbc53bbc2f7f226780bfcea7f4871dd71" Dec 03 10:07:56 crc kubenswrapper[4576]: I1203 10:07:56.990585 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j2j6f" Dec 03 10:07:57 crc kubenswrapper[4576]: I1203 10:07:57.028728 4576 scope.go:117] "RemoveContainer" containerID="6f8fd4b5932074b5c54556ab3fa1918fb8d889c7c50cb3a4501f847d549989c2" Dec 03 10:07:57 crc kubenswrapper[4576]: I1203 10:07:57.040543 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-j2j6f"] Dec 03 10:07:57 crc kubenswrapper[4576]: I1203 10:07:57.059976 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-j2j6f"] Dec 03 10:07:57 crc kubenswrapper[4576]: I1203 10:07:57.071712 4576 scope.go:117] "RemoveContainer" containerID="b3427a5246ef46c7b673a0144cd4e6683e34da01ed0d0ea11ad8f009fec0d66e" Dec 03 10:07:57 crc kubenswrapper[4576]: I1203 10:07:57.119742 4576 scope.go:117] "RemoveContainer" containerID="e35e62be8cac2fdecc3ec20f7682d9ffbc53bbc2f7f226780bfcea7f4871dd71" Dec 03 10:07:57 crc kubenswrapper[4576]: E1203 10:07:57.120402 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e35e62be8cac2fdecc3ec20f7682d9ffbc53bbc2f7f226780bfcea7f4871dd71\": container with ID starting with e35e62be8cac2fdecc3ec20f7682d9ffbc53bbc2f7f226780bfcea7f4871dd71 not found: ID does not exist" containerID="e35e62be8cac2fdecc3ec20f7682d9ffbc53bbc2f7f226780bfcea7f4871dd71" Dec 03 10:07:57 crc kubenswrapper[4576]: I1203 10:07:57.120457 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e35e62be8cac2fdecc3ec20f7682d9ffbc53bbc2f7f226780bfcea7f4871dd71"} err="failed to get container status \"e35e62be8cac2fdecc3ec20f7682d9ffbc53bbc2f7f226780bfcea7f4871dd71\": rpc error: code = NotFound desc = could not find container \"e35e62be8cac2fdecc3ec20f7682d9ffbc53bbc2f7f226780bfcea7f4871dd71\": container with ID starting with e35e62be8cac2fdecc3ec20f7682d9ffbc53bbc2f7f226780bfcea7f4871dd71 not found: ID does not exist" Dec 03 10:07:57 crc kubenswrapper[4576]: I1203 10:07:57.120485 4576 scope.go:117] "RemoveContainer" containerID="6f8fd4b5932074b5c54556ab3fa1918fb8d889c7c50cb3a4501f847d549989c2" Dec 03 10:07:57 crc kubenswrapper[4576]: E1203 10:07:57.120993 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6f8fd4b5932074b5c54556ab3fa1918fb8d889c7c50cb3a4501f847d549989c2\": container with ID starting with 6f8fd4b5932074b5c54556ab3fa1918fb8d889c7c50cb3a4501f847d549989c2 not found: ID does not exist" containerID="6f8fd4b5932074b5c54556ab3fa1918fb8d889c7c50cb3a4501f847d549989c2" Dec 03 10:07:57 crc kubenswrapper[4576]: I1203 10:07:57.121016 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f8fd4b5932074b5c54556ab3fa1918fb8d889c7c50cb3a4501f847d549989c2"} err="failed to get container status \"6f8fd4b5932074b5c54556ab3fa1918fb8d889c7c50cb3a4501f847d549989c2\": rpc error: code = NotFound desc = could not find container \"6f8fd4b5932074b5c54556ab3fa1918fb8d889c7c50cb3a4501f847d549989c2\": container with ID starting with 6f8fd4b5932074b5c54556ab3fa1918fb8d889c7c50cb3a4501f847d549989c2 not found: ID does not exist" Dec 03 10:07:57 crc kubenswrapper[4576]: I1203 10:07:57.121032 4576 scope.go:117] "RemoveContainer" containerID="b3427a5246ef46c7b673a0144cd4e6683e34da01ed0d0ea11ad8f009fec0d66e" Dec 03 10:07:57 crc kubenswrapper[4576]: E1203 10:07:57.121331 4576 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"b3427a5246ef46c7b673a0144cd4e6683e34da01ed0d0ea11ad8f009fec0d66e\": container with ID starting with b3427a5246ef46c7b673a0144cd4e6683e34da01ed0d0ea11ad8f009fec0d66e not found: ID does not exist" containerID="b3427a5246ef46c7b673a0144cd4e6683e34da01ed0d0ea11ad8f009fec0d66e" Dec 03 10:07:57 crc kubenswrapper[4576]: I1203 10:07:57.121371 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3427a5246ef46c7b673a0144cd4e6683e34da01ed0d0ea11ad8f009fec0d66e"} err="failed to get container status \"b3427a5246ef46c7b673a0144cd4e6683e34da01ed0d0ea11ad8f009fec0d66e\": rpc error: code = NotFound desc = could not find container \"b3427a5246ef46c7b673a0144cd4e6683e34da01ed0d0ea11ad8f009fec0d66e\": container with ID starting with b3427a5246ef46c7b673a0144cd4e6683e34da01ed0d0ea11ad8f009fec0d66e not found: ID does not exist" Dec 03 10:07:57 crc kubenswrapper[4576]: I1203 10:07:57.486860 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-rfj5d_cb5b59ce-56be-4b48-902a-902dc9e7a707/nmstate-console-plugin/0.log" Dec 03 10:07:57 crc kubenswrapper[4576]: I1203 10:07:57.628578 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-ll4ls_263f185a-e858-45d0-a61c-453056d0a98c/nmstate-handler/0.log" Dec 03 10:07:57 crc kubenswrapper[4576]: I1203 10:07:57.687481 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb866353-e7d9-4df0-8b3e-00e48556f7de" path="/var/lib/kubelet/pods/cb866353-e7d9-4df0-8b3e-00e48556f7de/volumes" Dec 03 10:07:57 crc kubenswrapper[4576]: I1203 10:07:57.803582 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-h2g5z_4811e0da-b69c-436c-a2f9-1796a35b69ee/nmstate-metrics/0.log" Dec 03 10:07:57 crc kubenswrapper[4576]: I1203 10:07:57.852768 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-h2g5z_4811e0da-b69c-436c-a2f9-1796a35b69ee/kube-rbac-proxy/0.log" Dec 03 10:07:58 crc kubenswrapper[4576]: I1203 10:07:58.080547 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-8ttll_fee161aa-5f26-41be-ba50-4b06079f597e/nmstate-operator/0.log" Dec 03 10:07:58 crc kubenswrapper[4576]: I1203 10:07:58.135113 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-889j7_208539d3-2c0f-4889-9239-c3dddd20ad3b/nmstate-webhook/0.log" Dec 03 10:08:01 crc kubenswrapper[4576]: I1203 10:08:01.677804 4576 scope.go:117] "RemoveContainer" containerID="1eb3d8c30b6286eadddb1522aa392505713e173cbe42e5fd9209c65960d0c2af" Dec 03 10:08:01 crc kubenswrapper[4576]: E1203 10:08:01.678574 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:08:16 crc kubenswrapper[4576]: I1203 10:08:16.636152 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-9fpwm_35fce6e6-ceb3-4844-ad2e-fb7454c2e425/kube-rbac-proxy/0.log" Dec 03 10:08:16 crc 
kubenswrapper[4576]: I1203 10:08:16.677386 4576 scope.go:117] "RemoveContainer" containerID="1eb3d8c30b6286eadddb1522aa392505713e173cbe42e5fd9209c65960d0c2af" Dec 03 10:08:16 crc kubenswrapper[4576]: E1203 10:08:16.677959 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:08:16 crc kubenswrapper[4576]: I1203 10:08:16.865720 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-9fpwm_35fce6e6-ceb3-4844-ad2e-fb7454c2e425/controller/0.log" Dec 03 10:08:16 crc kubenswrapper[4576]: I1203 10:08:16.959925 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jhd6k_f893c93f-566c-4c25-8e2b-48c3d73ca5fd/cp-frr-files/0.log" Dec 03 10:08:17 crc kubenswrapper[4576]: I1203 10:08:17.213307 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jhd6k_f893c93f-566c-4c25-8e2b-48c3d73ca5fd/cp-frr-files/0.log" Dec 03 10:08:17 crc kubenswrapper[4576]: I1203 10:08:17.225679 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jhd6k_f893c93f-566c-4c25-8e2b-48c3d73ca5fd/cp-reloader/0.log" Dec 03 10:08:17 crc kubenswrapper[4576]: I1203 10:08:17.285018 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jhd6k_f893c93f-566c-4c25-8e2b-48c3d73ca5fd/cp-reloader/0.log" Dec 03 10:08:17 crc kubenswrapper[4576]: I1203 10:08:17.313917 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jhd6k_f893c93f-566c-4c25-8e2b-48c3d73ca5fd/cp-metrics/0.log" Dec 03 10:08:17 crc kubenswrapper[4576]: I1203 10:08:17.537092 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jhd6k_f893c93f-566c-4c25-8e2b-48c3d73ca5fd/cp-frr-files/0.log" Dec 03 10:08:17 crc kubenswrapper[4576]: I1203 10:08:17.593336 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jhd6k_f893c93f-566c-4c25-8e2b-48c3d73ca5fd/cp-reloader/0.log" Dec 03 10:08:17 crc kubenswrapper[4576]: I1203 10:08:17.604923 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jhd6k_f893c93f-566c-4c25-8e2b-48c3d73ca5fd/cp-metrics/0.log" Dec 03 10:08:17 crc kubenswrapper[4576]: I1203 10:08:17.685106 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jhd6k_f893c93f-566c-4c25-8e2b-48c3d73ca5fd/cp-metrics/0.log" Dec 03 10:08:18 crc kubenswrapper[4576]: I1203 10:08:18.138268 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jhd6k_f893c93f-566c-4c25-8e2b-48c3d73ca5fd/cp-reloader/0.log" Dec 03 10:08:18 crc kubenswrapper[4576]: I1203 10:08:18.183137 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jhd6k_f893c93f-566c-4c25-8e2b-48c3d73ca5fd/cp-frr-files/0.log" Dec 03 10:08:18 crc kubenswrapper[4576]: I1203 10:08:18.224718 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jhd6k_f893c93f-566c-4c25-8e2b-48c3d73ca5fd/cp-metrics/0.log" Dec 03 10:08:18 crc kubenswrapper[4576]: I1203 10:08:18.273607 4576 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-jhd6k_f893c93f-566c-4c25-8e2b-48c3d73ca5fd/controller/0.log" Dec 03 10:08:18 crc kubenswrapper[4576]: I1203 10:08:18.491440 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jhd6k_f893c93f-566c-4c25-8e2b-48c3d73ca5fd/frr-metrics/0.log" Dec 03 10:08:18 crc kubenswrapper[4576]: I1203 10:08:18.501366 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jhd6k_f893c93f-566c-4c25-8e2b-48c3d73ca5fd/kube-rbac-proxy/0.log" Dec 03 10:08:18 crc kubenswrapper[4576]: I1203 10:08:18.575694 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jhd6k_f893c93f-566c-4c25-8e2b-48c3d73ca5fd/kube-rbac-proxy-frr/0.log" Dec 03 10:08:18 crc kubenswrapper[4576]: I1203 10:08:18.911800 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-rjsk6_ea79aacc-f31f-43cd-a0a4-151d9a4703e4/frr-k8s-webhook-server/0.log" Dec 03 10:08:18 crc kubenswrapper[4576]: I1203 10:08:18.913023 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jhd6k_f893c93f-566c-4c25-8e2b-48c3d73ca5fd/reloader/0.log" Dec 03 10:08:19 crc kubenswrapper[4576]: I1203 10:08:19.264092 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-6b55b5ccff-rm7kb_4641f310-7d84-4f47-8250-5551fc71ae77/manager/0.log" Dec 03 10:08:19 crc kubenswrapper[4576]: I1203 10:08:19.492458 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jhd6k_f893c93f-566c-4c25-8e2b-48c3d73ca5fd/frr/0.log" Dec 03 10:08:19 crc kubenswrapper[4576]: I1203 10:08:19.601680 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-7fdfc49798-njl4b_8f7f1ce8-dc0b-4508-a9aa-2527f55973ff/webhook-server/0.log" Dec 03 10:08:19 crc kubenswrapper[4576]: I1203 10:08:19.681849 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-8244f_7dc75631-a77b-4157-9abb-ba8ea06fb5dd/kube-rbac-proxy/0.log" Dec 03 10:08:20 crc kubenswrapper[4576]: I1203 10:08:20.086633 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-8244f_7dc75631-a77b-4157-9abb-ba8ea06fb5dd/speaker/0.log" Dec 03 10:08:31 crc kubenswrapper[4576]: I1203 10:08:31.678385 4576 scope.go:117] "RemoveContainer" containerID="1eb3d8c30b6286eadddb1522aa392505713e173cbe42e5fd9209c65960d0c2af" Dec 03 10:08:31 crc kubenswrapper[4576]: E1203 10:08:31.679221 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:08:34 crc kubenswrapper[4576]: I1203 10:08:34.388366 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7_afe8f623-5aa0-4830-8144-e8f207db1316/util/0.log" Dec 03 10:08:34 crc kubenswrapper[4576]: I1203 10:08:34.555756 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7_afe8f623-5aa0-4830-8144-e8f207db1316/util/0.log" Dec 03 10:08:34 crc 
kubenswrapper[4576]: I1203 10:08:34.558844 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7_afe8f623-5aa0-4830-8144-e8f207db1316/pull/0.log" Dec 03 10:08:34 crc kubenswrapper[4576]: I1203 10:08:34.595877 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7_afe8f623-5aa0-4830-8144-e8f207db1316/pull/0.log" Dec 03 10:08:34 crc kubenswrapper[4576]: I1203 10:08:34.818141 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7_afe8f623-5aa0-4830-8144-e8f207db1316/util/0.log" Dec 03 10:08:34 crc kubenswrapper[4576]: I1203 10:08:34.852923 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7_afe8f623-5aa0-4830-8144-e8f207db1316/pull/0.log" Dec 03 10:08:34 crc kubenswrapper[4576]: I1203 10:08:34.859268 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7_afe8f623-5aa0-4830-8144-e8f207db1316/extract/0.log" Dec 03 10:08:35 crc kubenswrapper[4576]: I1203 10:08:35.025226 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc_059fa16e-ef61-475d-927a-8b6fe7ed5c81/util/0.log" Dec 03 10:08:35 crc kubenswrapper[4576]: I1203 10:08:35.301688 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc_059fa16e-ef61-475d-927a-8b6fe7ed5c81/util/0.log" Dec 03 10:08:35 crc kubenswrapper[4576]: I1203 10:08:35.341468 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc_059fa16e-ef61-475d-927a-8b6fe7ed5c81/pull/0.log" Dec 03 10:08:35 crc kubenswrapper[4576]: I1203 10:08:35.366324 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc_059fa16e-ef61-475d-927a-8b6fe7ed5c81/pull/0.log" Dec 03 10:08:35 crc kubenswrapper[4576]: I1203 10:08:35.513623 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc_059fa16e-ef61-475d-927a-8b6fe7ed5c81/pull/0.log" Dec 03 10:08:35 crc kubenswrapper[4576]: I1203 10:08:35.520037 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc_059fa16e-ef61-475d-927a-8b6fe7ed5c81/util/0.log" Dec 03 10:08:35 crc kubenswrapper[4576]: I1203 10:08:35.677836 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc_059fa16e-ef61-475d-927a-8b6fe7ed5c81/extract/0.log" Dec 03 10:08:35 crc kubenswrapper[4576]: I1203 10:08:35.765630 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nl2dd_2fde46f0-4823-4c59-bd4d-31b63c794d13/extract-utilities/0.log" Dec 03 10:08:35 crc kubenswrapper[4576]: I1203 10:08:35.993233 4576 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_certified-operators-nl2dd_2fde46f0-4823-4c59-bd4d-31b63c794d13/extract-content/0.log" Dec 03 10:08:36 crc kubenswrapper[4576]: I1203 10:08:36.023447 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nl2dd_2fde46f0-4823-4c59-bd4d-31b63c794d13/extract-utilities/0.log" Dec 03 10:08:36 crc kubenswrapper[4576]: I1203 10:08:36.043471 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nl2dd_2fde46f0-4823-4c59-bd4d-31b63c794d13/extract-content/0.log" Dec 03 10:08:36 crc kubenswrapper[4576]: I1203 10:08:36.260715 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nl2dd_2fde46f0-4823-4c59-bd4d-31b63c794d13/extract-content/0.log" Dec 03 10:08:36 crc kubenswrapper[4576]: I1203 10:08:36.323023 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nl2dd_2fde46f0-4823-4c59-bd4d-31b63c794d13/extract-utilities/0.log" Dec 03 10:08:36 crc kubenswrapper[4576]: I1203 10:08:36.560202 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wgbb8_576695bd-064a-4fc2-8aa2-ba863892d1bb/extract-utilities/0.log" Dec 03 10:08:36 crc kubenswrapper[4576]: I1203 10:08:36.569313 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nl2dd_2fde46f0-4823-4c59-bd4d-31b63c794d13/registry-server/0.log" Dec 03 10:08:37 crc kubenswrapper[4576]: I1203 10:08:37.091224 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wgbb8_576695bd-064a-4fc2-8aa2-ba863892d1bb/extract-utilities/0.log" Dec 03 10:08:37 crc kubenswrapper[4576]: I1203 10:08:37.126627 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wgbb8_576695bd-064a-4fc2-8aa2-ba863892d1bb/extract-content/0.log" Dec 03 10:08:37 crc kubenswrapper[4576]: I1203 10:08:37.154986 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wgbb8_576695bd-064a-4fc2-8aa2-ba863892d1bb/extract-content/0.log" Dec 03 10:08:37 crc kubenswrapper[4576]: I1203 10:08:37.483260 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wgbb8_576695bd-064a-4fc2-8aa2-ba863892d1bb/extract-utilities/0.log" Dec 03 10:08:37 crc kubenswrapper[4576]: I1203 10:08:37.486273 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wgbb8_576695bd-064a-4fc2-8aa2-ba863892d1bb/extract-content/0.log" Dec 03 10:08:37 crc kubenswrapper[4576]: I1203 10:08:37.853401 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-pv99n_3fb60644-2f82-4e25-8121-0a9898ec0aa9/extract-utilities/0.log" Dec 03 10:08:37 crc kubenswrapper[4576]: I1203 10:08:37.892265 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-j26qz_c981f304-77aa-443d-8af7-3d665a32e754/marketplace-operator/0.log" Dec 03 10:08:38 crc kubenswrapper[4576]: I1203 10:08:38.175439 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wgbb8_576695bd-064a-4fc2-8aa2-ba863892d1bb/registry-server/0.log" Dec 03 10:08:38 crc kubenswrapper[4576]: I1203 10:08:38.212107 4576 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-marketplace-pv99n_3fb60644-2f82-4e25-8121-0a9898ec0aa9/extract-utilities/0.log" Dec 03 10:08:38 crc kubenswrapper[4576]: I1203 10:08:38.273847 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-pv99n_3fb60644-2f82-4e25-8121-0a9898ec0aa9/extract-content/0.log" Dec 03 10:08:38 crc kubenswrapper[4576]: I1203 10:08:38.290989 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-pv99n_3fb60644-2f82-4e25-8121-0a9898ec0aa9/extract-content/0.log" Dec 03 10:08:38 crc kubenswrapper[4576]: I1203 10:08:38.407922 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-pv99n_3fb60644-2f82-4e25-8121-0a9898ec0aa9/extract-content/0.log" Dec 03 10:08:38 crc kubenswrapper[4576]: I1203 10:08:38.515618 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-pv99n_3fb60644-2f82-4e25-8121-0a9898ec0aa9/extract-utilities/0.log" Dec 03 10:08:38 crc kubenswrapper[4576]: I1203 10:08:38.625452 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-pv99n_3fb60644-2f82-4e25-8121-0a9898ec0aa9/registry-server/0.log" Dec 03 10:08:38 crc kubenswrapper[4576]: I1203 10:08:38.707759 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-b78nr_15f066ad-3c0d-409b-9c47-e9a36cf6660d/extract-utilities/0.log" Dec 03 10:08:38 crc kubenswrapper[4576]: I1203 10:08:38.978506 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-b78nr_15f066ad-3c0d-409b-9c47-e9a36cf6660d/extract-content/0.log" Dec 03 10:08:39 crc kubenswrapper[4576]: I1203 10:08:39.021643 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-b78nr_15f066ad-3c0d-409b-9c47-e9a36cf6660d/extract-content/0.log" Dec 03 10:08:39 crc kubenswrapper[4576]: I1203 10:08:39.034899 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-b78nr_15f066ad-3c0d-409b-9c47-e9a36cf6660d/extract-utilities/0.log" Dec 03 10:08:39 crc kubenswrapper[4576]: I1203 10:08:39.276463 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-b78nr_15f066ad-3c0d-409b-9c47-e9a36cf6660d/extract-utilities/0.log" Dec 03 10:08:39 crc kubenswrapper[4576]: I1203 10:08:39.297870 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-b78nr_15f066ad-3c0d-409b-9c47-e9a36cf6660d/extract-content/0.log" Dec 03 10:08:39 crc kubenswrapper[4576]: I1203 10:08:39.695677 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-b78nr_15f066ad-3c0d-409b-9c47-e9a36cf6660d/registry-server/0.log" Dec 03 10:08:42 crc kubenswrapper[4576]: I1203 10:08:42.677767 4576 scope.go:117] "RemoveContainer" containerID="1eb3d8c30b6286eadddb1522aa392505713e173cbe42e5fd9209c65960d0c2af" Dec 03 10:08:42 crc kubenswrapper[4576]: E1203 10:08:42.678394 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:08:56 crc kubenswrapper[4576]: I1203 10:08:56.678356 4576 scope.go:117] "RemoveContainer" containerID="1eb3d8c30b6286eadddb1522aa392505713e173cbe42e5fd9209c65960d0c2af" Dec 03 10:08:56 crc kubenswrapper[4576]: E1203 10:08:56.679064 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:09:11 crc kubenswrapper[4576]: I1203 10:09:11.682369 4576 scope.go:117] "RemoveContainer" containerID="1eb3d8c30b6286eadddb1522aa392505713e173cbe42e5fd9209c65960d0c2af" Dec 03 10:09:11 crc kubenswrapper[4576]: E1203 10:09:11.683122 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:09:17 crc kubenswrapper[4576]: I1203 10:09:17.795105 4576 patch_prober.go:28] interesting pod/authentication-operator-69f744f599-hhd5z container/authentication-operator namespace/openshift-authentication-operator: Liveness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 03 10:09:17 crc kubenswrapper[4576]: I1203 10:09:17.795934 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-authentication-operator/authentication-operator-69f744f599-hhd5z" podUID="5375d73e-a10f-4e11-a6a7-9ec01e8a60ea" containerName="authentication-operator" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 10:09:23 crc kubenswrapper[4576]: I1203 10:09:23.692483 4576 scope.go:117] "RemoveContainer" containerID="1eb3d8c30b6286eadddb1522aa392505713e173cbe42e5fd9209c65960d0c2af" Dec 03 10:09:23 crc kubenswrapper[4576]: E1203 10:09:23.693567 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:09:34 crc kubenswrapper[4576]: I1203 10:09:34.677694 4576 scope.go:117] "RemoveContainer" containerID="1eb3d8c30b6286eadddb1522aa392505713e173cbe42e5fd9209c65960d0c2af" Dec 03 10:09:34 crc kubenswrapper[4576]: E1203 10:09:34.679003 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:09:47 crc kubenswrapper[4576]: I1203 10:09:47.678708 4576 scope.go:117] "RemoveContainer" containerID="1eb3d8c30b6286eadddb1522aa392505713e173cbe42e5fd9209c65960d0c2af" Dec 03 10:09:47 crc kubenswrapper[4576]: E1203 10:09:47.681089 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:09:59 crc kubenswrapper[4576]: I1203 10:09:59.684284 4576 scope.go:117] "RemoveContainer" containerID="1eb3d8c30b6286eadddb1522aa392505713e173cbe42e5fd9209c65960d0c2af" Dec 03 10:09:59 crc kubenswrapper[4576]: E1203 10:09:59.685245 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:10:11 crc kubenswrapper[4576]: I1203 10:10:11.680060 4576 scope.go:117] "RemoveContainer" containerID="1eb3d8c30b6286eadddb1522aa392505713e173cbe42e5fd9209c65960d0c2af" Dec 03 10:10:11 crc kubenswrapper[4576]: E1203 10:10:11.680986 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:10:22 crc kubenswrapper[4576]: I1203 10:10:22.511516 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-54w88"] Dec 03 10:10:22 crc kubenswrapper[4576]: E1203 10:10:22.513703 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb866353-e7d9-4df0-8b3e-00e48556f7de" containerName="extract-content" Dec 03 10:10:22 crc kubenswrapper[4576]: I1203 10:10:22.513806 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb866353-e7d9-4df0-8b3e-00e48556f7de" containerName="extract-content" Dec 03 10:10:22 crc kubenswrapper[4576]: E1203 10:10:22.513875 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb866353-e7d9-4df0-8b3e-00e48556f7de" containerName="extract-utilities" Dec 03 10:10:22 crc kubenswrapper[4576]: I1203 10:10:22.513931 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb866353-e7d9-4df0-8b3e-00e48556f7de" containerName="extract-utilities" Dec 03 10:10:22 crc kubenswrapper[4576]: E1203 10:10:22.514010 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb866353-e7d9-4df0-8b3e-00e48556f7de" containerName="registry-server" Dec 03 10:10:22 crc kubenswrapper[4576]: I1203 10:10:22.514068 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb866353-e7d9-4df0-8b3e-00e48556f7de" containerName="registry-server" Dec 03 10:10:22 crc kubenswrapper[4576]: I1203 
10:10:22.514389 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb866353-e7d9-4df0-8b3e-00e48556f7de" containerName="registry-server" Dec 03 10:10:22 crc kubenswrapper[4576]: I1203 10:10:22.516035 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-54w88" Dec 03 10:10:22 crc kubenswrapper[4576]: I1203 10:10:22.522395 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-54w88"] Dec 03 10:10:22 crc kubenswrapper[4576]: I1203 10:10:22.665735 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2bbb250-3550-4037-a17a-17f4bbd30eb3-utilities\") pod \"community-operators-54w88\" (UID: \"e2bbb250-3550-4037-a17a-17f4bbd30eb3\") " pod="openshift-marketplace/community-operators-54w88" Dec 03 10:10:22 crc kubenswrapper[4576]: I1203 10:10:22.666053 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tzszv\" (UniqueName: \"kubernetes.io/projected/e2bbb250-3550-4037-a17a-17f4bbd30eb3-kube-api-access-tzszv\") pod \"community-operators-54w88\" (UID: \"e2bbb250-3550-4037-a17a-17f4bbd30eb3\") " pod="openshift-marketplace/community-operators-54w88" Dec 03 10:10:22 crc kubenswrapper[4576]: I1203 10:10:22.666243 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2bbb250-3550-4037-a17a-17f4bbd30eb3-catalog-content\") pod \"community-operators-54w88\" (UID: \"e2bbb250-3550-4037-a17a-17f4bbd30eb3\") " pod="openshift-marketplace/community-operators-54w88" Dec 03 10:10:22 crc kubenswrapper[4576]: I1203 10:10:22.767571 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2bbb250-3550-4037-a17a-17f4bbd30eb3-catalog-content\") pod \"community-operators-54w88\" (UID: \"e2bbb250-3550-4037-a17a-17f4bbd30eb3\") " pod="openshift-marketplace/community-operators-54w88" Dec 03 10:10:22 crc kubenswrapper[4576]: I1203 10:10:22.767870 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2bbb250-3550-4037-a17a-17f4bbd30eb3-utilities\") pod \"community-operators-54w88\" (UID: \"e2bbb250-3550-4037-a17a-17f4bbd30eb3\") " pod="openshift-marketplace/community-operators-54w88" Dec 03 10:10:22 crc kubenswrapper[4576]: I1203 10:10:22.768014 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tzszv\" (UniqueName: \"kubernetes.io/projected/e2bbb250-3550-4037-a17a-17f4bbd30eb3-kube-api-access-tzszv\") pod \"community-operators-54w88\" (UID: \"e2bbb250-3550-4037-a17a-17f4bbd30eb3\") " pod="openshift-marketplace/community-operators-54w88" Dec 03 10:10:22 crc kubenswrapper[4576]: I1203 10:10:22.768258 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2bbb250-3550-4037-a17a-17f4bbd30eb3-catalog-content\") pod \"community-operators-54w88\" (UID: \"e2bbb250-3550-4037-a17a-17f4bbd30eb3\") " pod="openshift-marketplace/community-operators-54w88" Dec 03 10:10:22 crc kubenswrapper[4576]: I1203 10:10:22.768473 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/e2bbb250-3550-4037-a17a-17f4bbd30eb3-utilities\") pod \"community-operators-54w88\" (UID: \"e2bbb250-3550-4037-a17a-17f4bbd30eb3\") " pod="openshift-marketplace/community-operators-54w88" Dec 03 10:10:22 crc kubenswrapper[4576]: I1203 10:10:22.788615 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tzszv\" (UniqueName: \"kubernetes.io/projected/e2bbb250-3550-4037-a17a-17f4bbd30eb3-kube-api-access-tzszv\") pod \"community-operators-54w88\" (UID: \"e2bbb250-3550-4037-a17a-17f4bbd30eb3\") " pod="openshift-marketplace/community-operators-54w88" Dec 03 10:10:22 crc kubenswrapper[4576]: I1203 10:10:22.854587 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-54w88" Dec 03 10:10:23 crc kubenswrapper[4576]: I1203 10:10:23.547000 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-54w88"] Dec 03 10:10:23 crc kubenswrapper[4576]: I1203 10:10:23.702077 4576 scope.go:117] "RemoveContainer" containerID="1eb3d8c30b6286eadddb1522aa392505713e173cbe42e5fd9209c65960d0c2af" Dec 03 10:10:23 crc kubenswrapper[4576]: E1203 10:10:23.703315 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:10:24 crc kubenswrapper[4576]: I1203 10:10:24.090742 4576 generic.go:334] "Generic (PLEG): container finished" podID="e2bbb250-3550-4037-a17a-17f4bbd30eb3" containerID="2549e413fcead7089a10d9dce9b58d32a5497afbb2146791b952b917bd28ddc0" exitCode=0 Dec 03 10:10:24 crc kubenswrapper[4576]: I1203 10:10:24.090806 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-54w88" event={"ID":"e2bbb250-3550-4037-a17a-17f4bbd30eb3","Type":"ContainerDied","Data":"2549e413fcead7089a10d9dce9b58d32a5497afbb2146791b952b917bd28ddc0"} Dec 03 10:10:24 crc kubenswrapper[4576]: I1203 10:10:24.090845 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-54w88" event={"ID":"e2bbb250-3550-4037-a17a-17f4bbd30eb3","Type":"ContainerStarted","Data":"520d68ab7f21096c189f17628eaa7a32a9491bd9044f6ca1025a9b14485a2be2"} Dec 03 10:10:24 crc kubenswrapper[4576]: I1203 10:10:24.094739 4576 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 10:10:26 crc kubenswrapper[4576]: I1203 10:10:26.117174 4576 generic.go:334] "Generic (PLEG): container finished" podID="e2bbb250-3550-4037-a17a-17f4bbd30eb3" containerID="87a6f47ea3f78efc15561b35b96dc8395b1ea1f5fbe254ac50a59ba4779c730b" exitCode=0 Dec 03 10:10:26 crc kubenswrapper[4576]: I1203 10:10:26.117311 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-54w88" event={"ID":"e2bbb250-3550-4037-a17a-17f4bbd30eb3","Type":"ContainerDied","Data":"87a6f47ea3f78efc15561b35b96dc8395b1ea1f5fbe254ac50a59ba4779c730b"} Dec 03 10:10:27 crc kubenswrapper[4576]: I1203 10:10:27.147763 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-54w88" 
event={"ID":"e2bbb250-3550-4037-a17a-17f4bbd30eb3","Type":"ContainerStarted","Data":"d3022c281a1aad7078b54ff5836d050ff6bb38aa868c2dd31192bf04b23458d1"} Dec 03 10:10:27 crc kubenswrapper[4576]: I1203 10:10:27.174149 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-54w88" podStartSLOduration=2.678301611 podStartE2EDuration="5.174115449s" podCreationTimestamp="2025-12-03 10:10:22 +0000 UTC" firstStartedPulling="2025-12-03 10:10:24.094287231 +0000 UTC m=+5431.480264235" lastFinishedPulling="2025-12-03 10:10:26.590101089 +0000 UTC m=+5433.976078073" observedRunningTime="2025-12-03 10:10:27.172501615 +0000 UTC m=+5434.558478609" watchObservedRunningTime="2025-12-03 10:10:27.174115449 +0000 UTC m=+5434.560092433" Dec 03 10:10:32 crc kubenswrapper[4576]: I1203 10:10:32.855561 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-54w88" Dec 03 10:10:32 crc kubenswrapper[4576]: I1203 10:10:32.856127 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-54w88" Dec 03 10:10:32 crc kubenswrapper[4576]: I1203 10:10:32.911219 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-54w88" Dec 03 10:10:33 crc kubenswrapper[4576]: I1203 10:10:33.292889 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-54w88" Dec 03 10:10:33 crc kubenswrapper[4576]: I1203 10:10:33.343756 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-54w88"] Dec 03 10:10:34 crc kubenswrapper[4576]: I1203 10:10:34.678589 4576 scope.go:117] "RemoveContainer" containerID="1eb3d8c30b6286eadddb1522aa392505713e173cbe42e5fd9209c65960d0c2af" Dec 03 10:10:34 crc kubenswrapper[4576]: E1203 10:10:34.679482 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:10:35 crc kubenswrapper[4576]: I1203 10:10:35.254300 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-54w88" podUID="e2bbb250-3550-4037-a17a-17f4bbd30eb3" containerName="registry-server" containerID="cri-o://d3022c281a1aad7078b54ff5836d050ff6bb38aa868c2dd31192bf04b23458d1" gracePeriod=2 Dec 03 10:10:36 crc kubenswrapper[4576]: I1203 10:10:36.327771 4576 generic.go:334] "Generic (PLEG): container finished" podID="e2bbb250-3550-4037-a17a-17f4bbd30eb3" containerID="d3022c281a1aad7078b54ff5836d050ff6bb38aa868c2dd31192bf04b23458d1" exitCode=0 Dec 03 10:10:36 crc kubenswrapper[4576]: I1203 10:10:36.328239 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-54w88" event={"ID":"e2bbb250-3550-4037-a17a-17f4bbd30eb3","Type":"ContainerDied","Data":"d3022c281a1aad7078b54ff5836d050ff6bb38aa868c2dd31192bf04b23458d1"} Dec 03 10:10:36 crc kubenswrapper[4576]: I1203 10:10:36.328266 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-54w88" 
event={"ID":"e2bbb250-3550-4037-a17a-17f4bbd30eb3","Type":"ContainerDied","Data":"520d68ab7f21096c189f17628eaa7a32a9491bd9044f6ca1025a9b14485a2be2"} Dec 03 10:10:36 crc kubenswrapper[4576]: I1203 10:10:36.328278 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="520d68ab7f21096c189f17628eaa7a32a9491bd9044f6ca1025a9b14485a2be2" Dec 03 10:10:36 crc kubenswrapper[4576]: I1203 10:10:36.332626 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-54w88" Dec 03 10:10:36 crc kubenswrapper[4576]: I1203 10:10:36.486673 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2bbb250-3550-4037-a17a-17f4bbd30eb3-utilities\") pod \"e2bbb250-3550-4037-a17a-17f4bbd30eb3\" (UID: \"e2bbb250-3550-4037-a17a-17f4bbd30eb3\") " Dec 03 10:10:36 crc kubenswrapper[4576]: I1203 10:10:36.486767 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2bbb250-3550-4037-a17a-17f4bbd30eb3-catalog-content\") pod \"e2bbb250-3550-4037-a17a-17f4bbd30eb3\" (UID: \"e2bbb250-3550-4037-a17a-17f4bbd30eb3\") " Dec 03 10:10:36 crc kubenswrapper[4576]: I1203 10:10:36.486893 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tzszv\" (UniqueName: \"kubernetes.io/projected/e2bbb250-3550-4037-a17a-17f4bbd30eb3-kube-api-access-tzszv\") pod \"e2bbb250-3550-4037-a17a-17f4bbd30eb3\" (UID: \"e2bbb250-3550-4037-a17a-17f4bbd30eb3\") " Dec 03 10:10:36 crc kubenswrapper[4576]: I1203 10:10:36.489617 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e2bbb250-3550-4037-a17a-17f4bbd30eb3-utilities" (OuterVolumeSpecName: "utilities") pod "e2bbb250-3550-4037-a17a-17f4bbd30eb3" (UID: "e2bbb250-3550-4037-a17a-17f4bbd30eb3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 10:10:36 crc kubenswrapper[4576]: I1203 10:10:36.501650 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2bbb250-3550-4037-a17a-17f4bbd30eb3-kube-api-access-tzszv" (OuterVolumeSpecName: "kube-api-access-tzszv") pod "e2bbb250-3550-4037-a17a-17f4bbd30eb3" (UID: "e2bbb250-3550-4037-a17a-17f4bbd30eb3"). InnerVolumeSpecName "kube-api-access-tzszv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 10:10:36 crc kubenswrapper[4576]: I1203 10:10:36.549648 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e2bbb250-3550-4037-a17a-17f4bbd30eb3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e2bbb250-3550-4037-a17a-17f4bbd30eb3" (UID: "e2bbb250-3550-4037-a17a-17f4bbd30eb3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 10:10:36 crc kubenswrapper[4576]: I1203 10:10:36.589164 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2bbb250-3550-4037-a17a-17f4bbd30eb3-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 10:10:36 crc kubenswrapper[4576]: I1203 10:10:36.589205 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2bbb250-3550-4037-a17a-17f4bbd30eb3-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 10:10:36 crc kubenswrapper[4576]: I1203 10:10:36.589216 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tzszv\" (UniqueName: \"kubernetes.io/projected/e2bbb250-3550-4037-a17a-17f4bbd30eb3-kube-api-access-tzszv\") on node \"crc\" DevicePath \"\"" Dec 03 10:10:37 crc kubenswrapper[4576]: I1203 10:10:37.337208 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-54w88" Dec 03 10:10:37 crc kubenswrapper[4576]: I1203 10:10:37.379225 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-54w88"] Dec 03 10:10:37 crc kubenswrapper[4576]: I1203 10:10:37.392116 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-54w88"] Dec 03 10:10:37 crc kubenswrapper[4576]: I1203 10:10:37.695019 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e2bbb250-3550-4037-a17a-17f4bbd30eb3" path="/var/lib/kubelet/pods/e2bbb250-3550-4037-a17a-17f4bbd30eb3/volumes" Dec 03 10:10:49 crc kubenswrapper[4576]: I1203 10:10:49.679252 4576 scope.go:117] "RemoveContainer" containerID="1eb3d8c30b6286eadddb1522aa392505713e173cbe42e5fd9209c65960d0c2af" Dec 03 10:10:49 crc kubenswrapper[4576]: E1203 10:10:49.680624 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:10:58 crc kubenswrapper[4576]: I1203 10:10:58.267677 4576 scope.go:117] "RemoveContainer" containerID="b14041852be20b0870c486e8e5feb59527cb39fdf4cc2a30b42b4576e5f40b08" Dec 03 10:11:01 crc kubenswrapper[4576]: I1203 10:11:01.677849 4576 scope.go:117] "RemoveContainer" containerID="1eb3d8c30b6286eadddb1522aa392505713e173cbe42e5fd9209c65960d0c2af" Dec 03 10:11:01 crc kubenswrapper[4576]: E1203 10:11:01.678821 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:11:03 crc kubenswrapper[4576]: I1203 10:11:03.655754 4576 generic.go:334] "Generic (PLEG): container finished" podID="dcea8e10-9631-40f3-8579-359218432ec7" containerID="4f10b1db2d7d0850484f2e7bb07067bac4363055e76967d19f96f529af0c1edf" exitCode=0 Dec 03 10:11:03 crc kubenswrapper[4576]: I1203 10:11:03.655872 4576 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openshift-must-gather-58lk4/must-gather-bgxvl" event={"ID":"dcea8e10-9631-40f3-8579-359218432ec7","Type":"ContainerDied","Data":"4f10b1db2d7d0850484f2e7bb07067bac4363055e76967d19f96f529af0c1edf"} Dec 03 10:11:03 crc kubenswrapper[4576]: I1203 10:11:03.656707 4576 scope.go:117] "RemoveContainer" containerID="4f10b1db2d7d0850484f2e7bb07067bac4363055e76967d19f96f529af0c1edf" Dec 03 10:11:04 crc kubenswrapper[4576]: I1203 10:11:04.094103 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-58lk4_must-gather-bgxvl_dcea8e10-9631-40f3-8579-359218432ec7/gather/0.log" Dec 03 10:11:12 crc kubenswrapper[4576]: I1203 10:11:12.677168 4576 scope.go:117] "RemoveContainer" containerID="1eb3d8c30b6286eadddb1522aa392505713e173cbe42e5fd9209c65960d0c2af" Dec 03 10:11:13 crc kubenswrapper[4576]: I1203 10:11:13.322353 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-58lk4/must-gather-bgxvl"] Dec 03 10:11:13 crc kubenswrapper[4576]: I1203 10:11:13.323120 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-58lk4/must-gather-bgxvl" podUID="dcea8e10-9631-40f3-8579-359218432ec7" containerName="copy" containerID="cri-o://aea0cb972ed9f6eb9adac9597b4103a3fe9771ed0039dacf4fcad9e2529406ba" gracePeriod=2 Dec 03 10:11:13 crc kubenswrapper[4576]: I1203 10:11:13.330451 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-58lk4/must-gather-bgxvl"] Dec 03 10:11:13 crc kubenswrapper[4576]: I1203 10:11:13.764711 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-58lk4_must-gather-bgxvl_dcea8e10-9631-40f3-8579-359218432ec7/copy/0.log" Dec 03 10:11:13 crc kubenswrapper[4576]: I1203 10:11:13.766775 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-58lk4/must-gather-bgxvl" Dec 03 10:11:13 crc kubenswrapper[4576]: I1203 10:11:13.848092 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" event={"ID":"60b1bede-26e9-4b5d-b450-9866da685693","Type":"ContainerStarted","Data":"3cf7e7748a7af8f7cf693de2b91428d6d2ee2b9f329c964a3b8f5c09e79a6a3c"} Dec 03 10:11:13 crc kubenswrapper[4576]: I1203 10:11:13.851666 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-58lk4_must-gather-bgxvl_dcea8e10-9631-40f3-8579-359218432ec7/copy/0.log" Dec 03 10:11:13 crc kubenswrapper[4576]: I1203 10:11:13.853128 4576 generic.go:334] "Generic (PLEG): container finished" podID="dcea8e10-9631-40f3-8579-359218432ec7" containerID="aea0cb972ed9f6eb9adac9597b4103a3fe9771ed0039dacf4fcad9e2529406ba" exitCode=143 Dec 03 10:11:13 crc kubenswrapper[4576]: I1203 10:11:13.853193 4576 scope.go:117] "RemoveContainer" containerID="aea0cb972ed9f6eb9adac9597b4103a3fe9771ed0039dacf4fcad9e2529406ba" Dec 03 10:11:13 crc kubenswrapper[4576]: I1203 10:11:13.853347 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-58lk4/must-gather-bgxvl" Dec 03 10:11:13 crc kubenswrapper[4576]: I1203 10:11:13.890295 4576 scope.go:117] "RemoveContainer" containerID="4f10b1db2d7d0850484f2e7bb07067bac4363055e76967d19f96f529af0c1edf" Dec 03 10:11:13 crc kubenswrapper[4576]: I1203 10:11:13.937618 4576 scope.go:117] "RemoveContainer" containerID="aea0cb972ed9f6eb9adac9597b4103a3fe9771ed0039dacf4fcad9e2529406ba" Dec 03 10:11:13 crc kubenswrapper[4576]: E1203 10:11:13.938559 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aea0cb972ed9f6eb9adac9597b4103a3fe9771ed0039dacf4fcad9e2529406ba\": container with ID starting with aea0cb972ed9f6eb9adac9597b4103a3fe9771ed0039dacf4fcad9e2529406ba not found: ID does not exist" containerID="aea0cb972ed9f6eb9adac9597b4103a3fe9771ed0039dacf4fcad9e2529406ba" Dec 03 10:11:13 crc kubenswrapper[4576]: I1203 10:11:13.938596 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aea0cb972ed9f6eb9adac9597b4103a3fe9771ed0039dacf4fcad9e2529406ba"} err="failed to get container status \"aea0cb972ed9f6eb9adac9597b4103a3fe9771ed0039dacf4fcad9e2529406ba\": rpc error: code = NotFound desc = could not find container \"aea0cb972ed9f6eb9adac9597b4103a3fe9771ed0039dacf4fcad9e2529406ba\": container with ID starting with aea0cb972ed9f6eb9adac9597b4103a3fe9771ed0039dacf4fcad9e2529406ba not found: ID does not exist" Dec 03 10:11:13 crc kubenswrapper[4576]: I1203 10:11:13.938620 4576 scope.go:117] "RemoveContainer" containerID="4f10b1db2d7d0850484f2e7bb07067bac4363055e76967d19f96f529af0c1edf" Dec 03 10:11:13 crc kubenswrapper[4576]: E1203 10:11:13.940953 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4f10b1db2d7d0850484f2e7bb07067bac4363055e76967d19f96f529af0c1edf\": container with ID starting with 4f10b1db2d7d0850484f2e7bb07067bac4363055e76967d19f96f529af0c1edf not found: ID does not exist" containerID="4f10b1db2d7d0850484f2e7bb07067bac4363055e76967d19f96f529af0c1edf" Dec 03 10:11:13 crc kubenswrapper[4576]: I1203 10:11:13.941014 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4f10b1db2d7d0850484f2e7bb07067bac4363055e76967d19f96f529af0c1edf"} err="failed to get container status \"4f10b1db2d7d0850484f2e7bb07067bac4363055e76967d19f96f529af0c1edf\": rpc error: code = NotFound desc = could not find container \"4f10b1db2d7d0850484f2e7bb07067bac4363055e76967d19f96f529af0c1edf\": container with ID starting with 4f10b1db2d7d0850484f2e7bb07067bac4363055e76967d19f96f529af0c1edf not found: ID does not exist" Dec 03 10:11:13 crc kubenswrapper[4576]: I1203 10:11:13.972330 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/dcea8e10-9631-40f3-8579-359218432ec7-must-gather-output\") pod \"dcea8e10-9631-40f3-8579-359218432ec7\" (UID: \"dcea8e10-9631-40f3-8579-359218432ec7\") " Dec 03 10:11:13 crc kubenswrapper[4576]: I1203 10:11:13.981046 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dk7tr\" (UniqueName: \"kubernetes.io/projected/dcea8e10-9631-40f3-8579-359218432ec7-kube-api-access-dk7tr\") pod \"dcea8e10-9631-40f3-8579-359218432ec7\" (UID: \"dcea8e10-9631-40f3-8579-359218432ec7\") " Dec 03 10:11:13 crc kubenswrapper[4576]: I1203 10:11:13.987870 4576 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dcea8e10-9631-40f3-8579-359218432ec7-kube-api-access-dk7tr" (OuterVolumeSpecName: "kube-api-access-dk7tr") pod "dcea8e10-9631-40f3-8579-359218432ec7" (UID: "dcea8e10-9631-40f3-8579-359218432ec7"). InnerVolumeSpecName "kube-api-access-dk7tr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 10:11:14 crc kubenswrapper[4576]: I1203 10:11:14.084232 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dk7tr\" (UniqueName: \"kubernetes.io/projected/dcea8e10-9631-40f3-8579-359218432ec7-kube-api-access-dk7tr\") on node \"crc\" DevicePath \"\"" Dec 03 10:11:14 crc kubenswrapper[4576]: I1203 10:11:14.193996 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dcea8e10-9631-40f3-8579-359218432ec7-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "dcea8e10-9631-40f3-8579-359218432ec7" (UID: "dcea8e10-9631-40f3-8579-359218432ec7"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 10:11:14 crc kubenswrapper[4576]: I1203 10:11:14.290965 4576 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/dcea8e10-9631-40f3-8579-359218432ec7-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 03 10:11:15 crc kubenswrapper[4576]: I1203 10:11:15.689102 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dcea8e10-9631-40f3-8579-359218432ec7" path="/var/lib/kubelet/pods/dcea8e10-9631-40f3-8579-359218432ec7/volumes" Dec 03 10:11:52 crc kubenswrapper[4576]: I1203 10:11:52.407446 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-lgxzf"] Dec 03 10:11:52 crc kubenswrapper[4576]: E1203 10:11:52.408660 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2bbb250-3550-4037-a17a-17f4bbd30eb3" containerName="extract-utilities" Dec 03 10:11:52 crc kubenswrapper[4576]: I1203 10:11:52.408675 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2bbb250-3550-4037-a17a-17f4bbd30eb3" containerName="extract-utilities" Dec 03 10:11:52 crc kubenswrapper[4576]: E1203 10:11:52.408705 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcea8e10-9631-40f3-8579-359218432ec7" containerName="gather" Dec 03 10:11:52 crc kubenswrapper[4576]: I1203 10:11:52.408711 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcea8e10-9631-40f3-8579-359218432ec7" containerName="gather" Dec 03 10:11:52 crc kubenswrapper[4576]: E1203 10:11:52.408721 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2bbb250-3550-4037-a17a-17f4bbd30eb3" containerName="registry-server" Dec 03 10:11:52 crc kubenswrapper[4576]: I1203 10:11:52.408727 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2bbb250-3550-4037-a17a-17f4bbd30eb3" containerName="registry-server" Dec 03 10:11:52 crc kubenswrapper[4576]: E1203 10:11:52.408738 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcea8e10-9631-40f3-8579-359218432ec7" containerName="copy" Dec 03 10:11:52 crc kubenswrapper[4576]: I1203 10:11:52.408744 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcea8e10-9631-40f3-8579-359218432ec7" containerName="copy" Dec 03 10:11:52 crc kubenswrapper[4576]: E1203 10:11:52.408752 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2bbb250-3550-4037-a17a-17f4bbd30eb3" 
containerName="extract-content" Dec 03 10:11:52 crc kubenswrapper[4576]: I1203 10:11:52.408758 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2bbb250-3550-4037-a17a-17f4bbd30eb3" containerName="extract-content" Dec 03 10:11:52 crc kubenswrapper[4576]: I1203 10:11:52.408934 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2bbb250-3550-4037-a17a-17f4bbd30eb3" containerName="registry-server" Dec 03 10:11:52 crc kubenswrapper[4576]: I1203 10:11:52.408950 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="dcea8e10-9631-40f3-8579-359218432ec7" containerName="copy" Dec 03 10:11:52 crc kubenswrapper[4576]: I1203 10:11:52.408968 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="dcea8e10-9631-40f3-8579-359218432ec7" containerName="gather" Dec 03 10:11:52 crc kubenswrapper[4576]: I1203 10:11:52.410442 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lgxzf" Dec 03 10:11:52 crc kubenswrapper[4576]: I1203 10:11:52.418440 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lgxzf"] Dec 03 10:11:52 crc kubenswrapper[4576]: I1203 10:11:52.611024 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78-catalog-content\") pod \"certified-operators-lgxzf\" (UID: \"ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78\") " pod="openshift-marketplace/certified-operators-lgxzf" Dec 03 10:11:52 crc kubenswrapper[4576]: I1203 10:11:52.611144 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4h4xq\" (UniqueName: \"kubernetes.io/projected/ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78-kube-api-access-4h4xq\") pod \"certified-operators-lgxzf\" (UID: \"ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78\") " pod="openshift-marketplace/certified-operators-lgxzf" Dec 03 10:11:52 crc kubenswrapper[4576]: I1203 10:11:52.611795 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78-utilities\") pod \"certified-operators-lgxzf\" (UID: \"ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78\") " pod="openshift-marketplace/certified-operators-lgxzf" Dec 03 10:11:52 crc kubenswrapper[4576]: I1203 10:11:52.713794 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4h4xq\" (UniqueName: \"kubernetes.io/projected/ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78-kube-api-access-4h4xq\") pod \"certified-operators-lgxzf\" (UID: \"ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78\") " pod="openshift-marketplace/certified-operators-lgxzf" Dec 03 10:11:52 crc kubenswrapper[4576]: I1203 10:11:52.713876 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78-utilities\") pod \"certified-operators-lgxzf\" (UID: \"ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78\") " pod="openshift-marketplace/certified-operators-lgxzf" Dec 03 10:11:52 crc kubenswrapper[4576]: I1203 10:11:52.714024 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78-catalog-content\") pod \"certified-operators-lgxzf\" (UID: 
\"ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78\") " pod="openshift-marketplace/certified-operators-lgxzf" Dec 03 10:11:52 crc kubenswrapper[4576]: I1203 10:11:52.714705 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78-catalog-content\") pod \"certified-operators-lgxzf\" (UID: \"ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78\") " pod="openshift-marketplace/certified-operators-lgxzf" Dec 03 10:11:52 crc kubenswrapper[4576]: I1203 10:11:52.715326 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78-utilities\") pod \"certified-operators-lgxzf\" (UID: \"ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78\") " pod="openshift-marketplace/certified-operators-lgxzf" Dec 03 10:11:52 crc kubenswrapper[4576]: I1203 10:11:52.736768 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4h4xq\" (UniqueName: \"kubernetes.io/projected/ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78-kube-api-access-4h4xq\") pod \"certified-operators-lgxzf\" (UID: \"ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78\") " pod="openshift-marketplace/certified-operators-lgxzf" Dec 03 10:11:52 crc kubenswrapper[4576]: I1203 10:11:52.750522 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lgxzf" Dec 03 10:11:53 crc kubenswrapper[4576]: I1203 10:11:53.093815 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lgxzf"] Dec 03 10:11:53 crc kubenswrapper[4576]: I1203 10:11:53.246949 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lgxzf" event={"ID":"ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78","Type":"ContainerStarted","Data":"2c98bde555ed835d44a43f4913baca197bdb37dacb0a0587ae46945e6d12c16d"} Dec 03 10:11:54 crc kubenswrapper[4576]: I1203 10:11:54.262709 4576 generic.go:334] "Generic (PLEG): container finished" podID="ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78" containerID="89ed6475ce6defe712136e1db2c5efbd70af996bd48b7deedeb2081e6ed4ad37" exitCode=0 Dec 03 10:11:54 crc kubenswrapper[4576]: I1203 10:11:54.262786 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lgxzf" event={"ID":"ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78","Type":"ContainerDied","Data":"89ed6475ce6defe712136e1db2c5efbd70af996bd48b7deedeb2081e6ed4ad37"} Dec 03 10:11:55 crc kubenswrapper[4576]: I1203 10:11:55.272012 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lgxzf" event={"ID":"ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78","Type":"ContainerStarted","Data":"bd6adf083773045372a62d0f05ceac9424285264fd100949507e0310631564ce"} Dec 03 10:11:56 crc kubenswrapper[4576]: I1203 10:11:56.286303 4576 generic.go:334] "Generic (PLEG): container finished" podID="ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78" containerID="bd6adf083773045372a62d0f05ceac9424285264fd100949507e0310631564ce" exitCode=0 Dec 03 10:11:56 crc kubenswrapper[4576]: I1203 10:11:56.286424 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lgxzf" event={"ID":"ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78","Type":"ContainerDied","Data":"bd6adf083773045372a62d0f05ceac9424285264fd100949507e0310631564ce"} Dec 03 10:11:57 crc kubenswrapper[4576]: I1203 10:11:57.296522 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-lgxzf" event={"ID":"ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78","Type":"ContainerStarted","Data":"5f352308c5791c4e5d32a91e72321be85c2d7ab232d96fb70f78f4f88659ef1d"} Dec 03 10:11:57 crc kubenswrapper[4576]: I1203 10:11:57.323966 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-lgxzf" podStartSLOduration=2.8605069739999998 podStartE2EDuration="5.32392896s" podCreationTimestamp="2025-12-03 10:11:52 +0000 UTC" firstStartedPulling="2025-12-03 10:11:54.265824397 +0000 UTC m=+5521.651801401" lastFinishedPulling="2025-12-03 10:11:56.729246403 +0000 UTC m=+5524.115223387" observedRunningTime="2025-12-03 10:11:57.317192579 +0000 UTC m=+5524.703169563" watchObservedRunningTime="2025-12-03 10:11:57.32392896 +0000 UTC m=+5524.709905944" Dec 03 10:11:58 crc kubenswrapper[4576]: I1203 10:11:58.378749 4576 scope.go:117] "RemoveContainer" containerID="c6f33df55e1ab6708f81dd627f67e816510503e143ebd6f04015494a73165f68" Dec 03 10:12:02 crc kubenswrapper[4576]: I1203 10:12:02.763165 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-lgxzf" Dec 03 10:12:02 crc kubenswrapper[4576]: I1203 10:12:02.763802 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-lgxzf" Dec 03 10:12:02 crc kubenswrapper[4576]: I1203 10:12:02.842248 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-lgxzf" Dec 03 10:12:03 crc kubenswrapper[4576]: I1203 10:12:03.412143 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-lgxzf" Dec 03 10:12:05 crc kubenswrapper[4576]: I1203 10:12:05.079986 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lgxzf"] Dec 03 10:12:05 crc kubenswrapper[4576]: I1203 10:12:05.376350 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-lgxzf" podUID="ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78" containerName="registry-server" containerID="cri-o://5f352308c5791c4e5d32a91e72321be85c2d7ab232d96fb70f78f4f88659ef1d" gracePeriod=2 Dec 03 10:12:06 crc kubenswrapper[4576]: I1203 10:12:06.400751 4576 generic.go:334] "Generic (PLEG): container finished" podID="ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78" containerID="5f352308c5791c4e5d32a91e72321be85c2d7ab232d96fb70f78f4f88659ef1d" exitCode=0 Dec 03 10:12:06 crc kubenswrapper[4576]: I1203 10:12:06.400815 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lgxzf" event={"ID":"ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78","Type":"ContainerDied","Data":"5f352308c5791c4e5d32a91e72321be85c2d7ab232d96fb70f78f4f88659ef1d"} Dec 03 10:12:06 crc kubenswrapper[4576]: I1203 10:12:06.401146 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lgxzf" event={"ID":"ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78","Type":"ContainerDied","Data":"2c98bde555ed835d44a43f4913baca197bdb37dacb0a0587ae46945e6d12c16d"} Dec 03 10:12:06 crc kubenswrapper[4576]: I1203 10:12:06.401164 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2c98bde555ed835d44a43f4913baca197bdb37dacb0a0587ae46945e6d12c16d" Dec 03 10:12:06 crc kubenswrapper[4576]: I1203 10:12:06.420626 4576 util.go:48] "No ready sandbox for pod 
can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lgxzf" Dec 03 10:12:06 crc kubenswrapper[4576]: I1203 10:12:06.572772 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4h4xq\" (UniqueName: \"kubernetes.io/projected/ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78-kube-api-access-4h4xq\") pod \"ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78\" (UID: \"ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78\") " Dec 03 10:12:06 crc kubenswrapper[4576]: I1203 10:12:06.574054 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78-catalog-content\") pod \"ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78\" (UID: \"ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78\") " Dec 03 10:12:06 crc kubenswrapper[4576]: I1203 10:12:06.574153 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78-utilities\") pod \"ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78\" (UID: \"ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78\") " Dec 03 10:12:06 crc kubenswrapper[4576]: I1203 10:12:06.575244 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78-utilities" (OuterVolumeSpecName: "utilities") pod "ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78" (UID: "ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 10:12:06 crc kubenswrapper[4576]: I1203 10:12:06.583629 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78-kube-api-access-4h4xq" (OuterVolumeSpecName: "kube-api-access-4h4xq") pod "ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78" (UID: "ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78"). InnerVolumeSpecName "kube-api-access-4h4xq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 10:12:06 crc kubenswrapper[4576]: I1203 10:12:06.648907 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78" (UID: "ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 10:12:06 crc kubenswrapper[4576]: I1203 10:12:06.677192 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 10:12:06 crc kubenswrapper[4576]: I1203 10:12:06.677226 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 10:12:06 crc kubenswrapper[4576]: I1203 10:12:06.677237 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4h4xq\" (UniqueName: \"kubernetes.io/projected/ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78-kube-api-access-4h4xq\") on node \"crc\" DevicePath \"\"" Dec 03 10:12:07 crc kubenswrapper[4576]: I1203 10:12:07.409704 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-lgxzf" Dec 03 10:12:07 crc kubenswrapper[4576]: I1203 10:12:07.450749 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lgxzf"] Dec 03 10:12:07 crc kubenswrapper[4576]: I1203 10:12:07.461018 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-lgxzf"] Dec 03 10:12:07 crc kubenswrapper[4576]: I1203 10:12:07.690128 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78" path="/var/lib/kubelet/pods/ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78/volumes" Dec 03 10:13:39 crc kubenswrapper[4576]: I1203 10:13:39.681080 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 10:13:39 crc kubenswrapper[4576]: I1203 10:13:39.681922 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 10:14:08 crc kubenswrapper[4576]: I1203 10:14:08.674269 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-g9xst"] Dec 03 10:14:08 crc kubenswrapper[4576]: E1203 10:14:08.675313 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78" containerName="extract-content" Dec 03 10:14:08 crc kubenswrapper[4576]: I1203 10:14:08.675330 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78" containerName="extract-content" Dec 03 10:14:08 crc kubenswrapper[4576]: E1203 10:14:08.675347 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78" containerName="registry-server" Dec 03 10:14:08 crc kubenswrapper[4576]: I1203 10:14:08.675353 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78" containerName="registry-server" Dec 03 10:14:08 crc kubenswrapper[4576]: E1203 10:14:08.675378 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78" containerName="extract-utilities" Dec 03 10:14:08 crc kubenswrapper[4576]: I1203 10:14:08.675385 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78" containerName="extract-utilities" Dec 03 10:14:08 crc kubenswrapper[4576]: I1203 10:14:08.675591 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab1db9cb-bc81-40a6-aa7f-0d36ffeaaf78" containerName="registry-server" Dec 03 10:14:08 crc kubenswrapper[4576]: I1203 10:14:08.676993 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-g9xst" Dec 03 10:14:08 crc kubenswrapper[4576]: I1203 10:14:08.692674 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-g9xst"] Dec 03 10:14:08 crc kubenswrapper[4576]: I1203 10:14:08.828754 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0bf694c0-e325-42d7-9ae8-bc1eff85bf41-utilities\") pod \"redhat-operators-g9xst\" (UID: \"0bf694c0-e325-42d7-9ae8-bc1eff85bf41\") " pod="openshift-marketplace/redhat-operators-g9xst" Dec 03 10:14:08 crc kubenswrapper[4576]: I1203 10:14:08.828905 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0bf694c0-e325-42d7-9ae8-bc1eff85bf41-catalog-content\") pod \"redhat-operators-g9xst\" (UID: \"0bf694c0-e325-42d7-9ae8-bc1eff85bf41\") " pod="openshift-marketplace/redhat-operators-g9xst" Dec 03 10:14:08 crc kubenswrapper[4576]: I1203 10:14:08.828940 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fd8xt\" (UniqueName: \"kubernetes.io/projected/0bf694c0-e325-42d7-9ae8-bc1eff85bf41-kube-api-access-fd8xt\") pod \"redhat-operators-g9xst\" (UID: \"0bf694c0-e325-42d7-9ae8-bc1eff85bf41\") " pod="openshift-marketplace/redhat-operators-g9xst" Dec 03 10:14:08 crc kubenswrapper[4576]: I1203 10:14:08.962226 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0bf694c0-e325-42d7-9ae8-bc1eff85bf41-catalog-content\") pod \"redhat-operators-g9xst\" (UID: \"0bf694c0-e325-42d7-9ae8-bc1eff85bf41\") " pod="openshift-marketplace/redhat-operators-g9xst" Dec 03 10:14:08 crc kubenswrapper[4576]: I1203 10:14:08.962272 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fd8xt\" (UniqueName: \"kubernetes.io/projected/0bf694c0-e325-42d7-9ae8-bc1eff85bf41-kube-api-access-fd8xt\") pod \"redhat-operators-g9xst\" (UID: \"0bf694c0-e325-42d7-9ae8-bc1eff85bf41\") " pod="openshift-marketplace/redhat-operators-g9xst" Dec 03 10:14:08 crc kubenswrapper[4576]: I1203 10:14:08.962363 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0bf694c0-e325-42d7-9ae8-bc1eff85bf41-utilities\") pod \"redhat-operators-g9xst\" (UID: \"0bf694c0-e325-42d7-9ae8-bc1eff85bf41\") " pod="openshift-marketplace/redhat-operators-g9xst" Dec 03 10:14:08 crc kubenswrapper[4576]: I1203 10:14:08.962856 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0bf694c0-e325-42d7-9ae8-bc1eff85bf41-utilities\") pod \"redhat-operators-g9xst\" (UID: \"0bf694c0-e325-42d7-9ae8-bc1eff85bf41\") " pod="openshift-marketplace/redhat-operators-g9xst" Dec 03 10:14:08 crc kubenswrapper[4576]: I1203 10:14:08.962927 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0bf694c0-e325-42d7-9ae8-bc1eff85bf41-catalog-content\") pod \"redhat-operators-g9xst\" (UID: \"0bf694c0-e325-42d7-9ae8-bc1eff85bf41\") " pod="openshift-marketplace/redhat-operators-g9xst" Dec 03 10:14:08 crc kubenswrapper[4576]: I1203 10:14:08.983465 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-fd8xt\" (UniqueName: \"kubernetes.io/projected/0bf694c0-e325-42d7-9ae8-bc1eff85bf41-kube-api-access-fd8xt\") pod \"redhat-operators-g9xst\" (UID: \"0bf694c0-e325-42d7-9ae8-bc1eff85bf41\") " pod="openshift-marketplace/redhat-operators-g9xst" Dec 03 10:14:08 crc kubenswrapper[4576]: I1203 10:14:08.996384 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-g9xst" Dec 03 10:14:09 crc kubenswrapper[4576]: I1203 10:14:09.453696 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-g9xst"] Dec 03 10:14:09 crc kubenswrapper[4576]: I1203 10:14:09.680578 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 10:14:09 crc kubenswrapper[4576]: I1203 10:14:09.680855 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 10:14:09 crc kubenswrapper[4576]: I1203 10:14:09.809703 4576 generic.go:334] "Generic (PLEG): container finished" podID="0bf694c0-e325-42d7-9ae8-bc1eff85bf41" containerID="d7594ef23c733a5877f77c57754ee37b26a908cfce929ce72b3d54bfe8689a37" exitCode=0 Dec 03 10:14:09 crc kubenswrapper[4576]: I1203 10:14:09.809758 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g9xst" event={"ID":"0bf694c0-e325-42d7-9ae8-bc1eff85bf41","Type":"ContainerDied","Data":"d7594ef23c733a5877f77c57754ee37b26a908cfce929ce72b3d54bfe8689a37"} Dec 03 10:14:09 crc kubenswrapper[4576]: I1203 10:14:09.809791 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g9xst" event={"ID":"0bf694c0-e325-42d7-9ae8-bc1eff85bf41","Type":"ContainerStarted","Data":"fc1b9af99667d7df02d50bc4d08a5b1941f579f067c8787d1447628ac7828172"} Dec 03 10:14:11 crc kubenswrapper[4576]: I1203 10:14:11.843639 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g9xst" event={"ID":"0bf694c0-e325-42d7-9ae8-bc1eff85bf41","Type":"ContainerStarted","Data":"ec16b3b0abc3669395796c4f948c5b697b1ed85b9e330c9aa77807fb8261b22c"} Dec 03 10:14:14 crc kubenswrapper[4576]: I1203 10:14:14.592590 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-8vjbs/must-gather-gp9vq"] Dec 03 10:14:14 crc kubenswrapper[4576]: I1203 10:14:14.594412 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-8vjbs/must-gather-gp9vq" Dec 03 10:14:14 crc kubenswrapper[4576]: I1203 10:14:14.599329 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-8vjbs"/"openshift-service-ca.crt" Dec 03 10:14:14 crc kubenswrapper[4576]: I1203 10:14:14.601331 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-8vjbs"/"kube-root-ca.crt" Dec 03 10:14:14 crc kubenswrapper[4576]: I1203 10:14:14.627471 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-8vjbs/must-gather-gp9vq"] Dec 03 10:14:14 crc kubenswrapper[4576]: I1203 10:14:14.754914 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8mgh\" (UniqueName: \"kubernetes.io/projected/4e72ede7-9d4a-4ece-a2e0-72131da68420-kube-api-access-z8mgh\") pod \"must-gather-gp9vq\" (UID: \"4e72ede7-9d4a-4ece-a2e0-72131da68420\") " pod="openshift-must-gather-8vjbs/must-gather-gp9vq" Dec 03 10:14:14 crc kubenswrapper[4576]: I1203 10:14:14.755007 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/4e72ede7-9d4a-4ece-a2e0-72131da68420-must-gather-output\") pod \"must-gather-gp9vq\" (UID: \"4e72ede7-9d4a-4ece-a2e0-72131da68420\") " pod="openshift-must-gather-8vjbs/must-gather-gp9vq" Dec 03 10:14:14 crc kubenswrapper[4576]: I1203 10:14:14.864687 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8mgh\" (UniqueName: \"kubernetes.io/projected/4e72ede7-9d4a-4ece-a2e0-72131da68420-kube-api-access-z8mgh\") pod \"must-gather-gp9vq\" (UID: \"4e72ede7-9d4a-4ece-a2e0-72131da68420\") " pod="openshift-must-gather-8vjbs/must-gather-gp9vq" Dec 03 10:14:14 crc kubenswrapper[4576]: I1203 10:14:14.864789 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/4e72ede7-9d4a-4ece-a2e0-72131da68420-must-gather-output\") pod \"must-gather-gp9vq\" (UID: \"4e72ede7-9d4a-4ece-a2e0-72131da68420\") " pod="openshift-must-gather-8vjbs/must-gather-gp9vq" Dec 03 10:14:14 crc kubenswrapper[4576]: I1203 10:14:14.865178 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/4e72ede7-9d4a-4ece-a2e0-72131da68420-must-gather-output\") pod \"must-gather-gp9vq\" (UID: \"4e72ede7-9d4a-4ece-a2e0-72131da68420\") " pod="openshift-must-gather-8vjbs/must-gather-gp9vq" Dec 03 10:14:14 crc kubenswrapper[4576]: I1203 10:14:14.908660 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8mgh\" (UniqueName: \"kubernetes.io/projected/4e72ede7-9d4a-4ece-a2e0-72131da68420-kube-api-access-z8mgh\") pod \"must-gather-gp9vq\" (UID: \"4e72ede7-9d4a-4ece-a2e0-72131da68420\") " pod="openshift-must-gather-8vjbs/must-gather-gp9vq" Dec 03 10:14:14 crc kubenswrapper[4576]: I1203 10:14:14.925966 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-8vjbs/must-gather-gp9vq" Dec 03 10:14:16 crc kubenswrapper[4576]: I1203 10:14:16.943595 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-8vjbs/must-gather-gp9vq"] Dec 03 10:14:17 crc kubenswrapper[4576]: I1203 10:14:17.912048 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8vjbs/must-gather-gp9vq" event={"ID":"4e72ede7-9d4a-4ece-a2e0-72131da68420","Type":"ContainerStarted","Data":"3a1b413f5b90bf3dc3e3d95a1938bdbd03b8f82548d6271947e959f9907b5e0c"} Dec 03 10:14:19 crc kubenswrapper[4576]: I1203 10:14:19.928574 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8vjbs/must-gather-gp9vq" event={"ID":"4e72ede7-9d4a-4ece-a2e0-72131da68420","Type":"ContainerStarted","Data":"6370799c1ec6c212111c1db9a9fdab87fa28e0cb68470107ac49f67f67e7de3c"} Dec 03 10:14:20 crc kubenswrapper[4576]: I1203 10:14:20.940179 4576 generic.go:334] "Generic (PLEG): container finished" podID="0bf694c0-e325-42d7-9ae8-bc1eff85bf41" containerID="ec16b3b0abc3669395796c4f948c5b697b1ed85b9e330c9aa77807fb8261b22c" exitCode=0 Dec 03 10:14:20 crc kubenswrapper[4576]: I1203 10:14:20.940247 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g9xst" event={"ID":"0bf694c0-e325-42d7-9ae8-bc1eff85bf41","Type":"ContainerDied","Data":"ec16b3b0abc3669395796c4f948c5b697b1ed85b9e330c9aa77807fb8261b22c"} Dec 03 10:14:20 crc kubenswrapper[4576]: I1203 10:14:20.942641 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8vjbs/must-gather-gp9vq" event={"ID":"4e72ede7-9d4a-4ece-a2e0-72131da68420","Type":"ContainerStarted","Data":"1ce2798a9833b85f18cd451299b9bab47e87608023f5218f96831ba210852239"} Dec 03 10:14:20 crc kubenswrapper[4576]: I1203 10:14:20.986944 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-8vjbs/must-gather-gp9vq" podStartSLOduration=6.986928726 podStartE2EDuration="6.986928726s" podCreationTimestamp="2025-12-03 10:14:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 10:14:20.986198187 +0000 UTC m=+5668.372175171" watchObservedRunningTime="2025-12-03 10:14:20.986928726 +0000 UTC m=+5668.372905700" Dec 03 10:14:21 crc kubenswrapper[4576]: I1203 10:14:21.960621 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g9xst" event={"ID":"0bf694c0-e325-42d7-9ae8-bc1eff85bf41","Type":"ContainerStarted","Data":"72c6fc8b306a4737f59867e89c7ac201ea7e35ca87a3a90a7271b32ff89b9552"} Dec 03 10:14:21 crc kubenswrapper[4576]: I1203 10:14:21.981555 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-g9xst" podStartSLOduration=2.060014955 podStartE2EDuration="13.981501739s" podCreationTimestamp="2025-12-03 10:14:08 +0000 UTC" firstStartedPulling="2025-12-03 10:14:09.811225185 +0000 UTC m=+5657.197202169" lastFinishedPulling="2025-12-03 10:14:21.732711969 +0000 UTC m=+5669.118688953" observedRunningTime="2025-12-03 10:14:21.980348248 +0000 UTC m=+5669.366325232" watchObservedRunningTime="2025-12-03 10:14:21.981501739 +0000 UTC m=+5669.367478723" Dec 03 10:14:24 crc kubenswrapper[4576]: I1203 10:14:24.947672 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-8vjbs/crc-debug-6xl7t"] Dec 03 10:14:24 crc kubenswrapper[4576]: I1203 10:14:24.949668 4576 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-8vjbs/crc-debug-6xl7t" Dec 03 10:14:24 crc kubenswrapper[4576]: I1203 10:14:24.951662 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-8vjbs"/"default-dockercfg-nzt8r" Dec 03 10:14:25 crc kubenswrapper[4576]: I1203 10:14:25.029079 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/37d36047-85ec-49ac-a669-c6697fec69a2-host\") pod \"crc-debug-6xl7t\" (UID: \"37d36047-85ec-49ac-a669-c6697fec69a2\") " pod="openshift-must-gather-8vjbs/crc-debug-6xl7t" Dec 03 10:14:25 crc kubenswrapper[4576]: I1203 10:14:25.029148 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7hb84\" (UniqueName: \"kubernetes.io/projected/37d36047-85ec-49ac-a669-c6697fec69a2-kube-api-access-7hb84\") pod \"crc-debug-6xl7t\" (UID: \"37d36047-85ec-49ac-a669-c6697fec69a2\") " pod="openshift-must-gather-8vjbs/crc-debug-6xl7t" Dec 03 10:14:25 crc kubenswrapper[4576]: I1203 10:14:25.132379 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/37d36047-85ec-49ac-a669-c6697fec69a2-host\") pod \"crc-debug-6xl7t\" (UID: \"37d36047-85ec-49ac-a669-c6697fec69a2\") " pod="openshift-must-gather-8vjbs/crc-debug-6xl7t" Dec 03 10:14:25 crc kubenswrapper[4576]: I1203 10:14:25.132471 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7hb84\" (UniqueName: \"kubernetes.io/projected/37d36047-85ec-49ac-a669-c6697fec69a2-kube-api-access-7hb84\") pod \"crc-debug-6xl7t\" (UID: \"37d36047-85ec-49ac-a669-c6697fec69a2\") " pod="openshift-must-gather-8vjbs/crc-debug-6xl7t" Dec 03 10:14:25 crc kubenswrapper[4576]: I1203 10:14:25.132690 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/37d36047-85ec-49ac-a669-c6697fec69a2-host\") pod \"crc-debug-6xl7t\" (UID: \"37d36047-85ec-49ac-a669-c6697fec69a2\") " pod="openshift-must-gather-8vjbs/crc-debug-6xl7t" Dec 03 10:14:25 crc kubenswrapper[4576]: I1203 10:14:25.164350 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7hb84\" (UniqueName: \"kubernetes.io/projected/37d36047-85ec-49ac-a669-c6697fec69a2-kube-api-access-7hb84\") pod \"crc-debug-6xl7t\" (UID: \"37d36047-85ec-49ac-a669-c6697fec69a2\") " pod="openshift-must-gather-8vjbs/crc-debug-6xl7t" Dec 03 10:14:25 crc kubenswrapper[4576]: I1203 10:14:25.271817 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-8vjbs/crc-debug-6xl7t" Dec 03 10:14:25 crc kubenswrapper[4576]: W1203 10:14:25.335285 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod37d36047_85ec_49ac_a669_c6697fec69a2.slice/crio-d0705bf3cdda292edbeee26cd5bd4430c3640ea1bef7bb056bd96f93212f3bc5 WatchSource:0}: Error finding container d0705bf3cdda292edbeee26cd5bd4430c3640ea1bef7bb056bd96f93212f3bc5: Status 404 returned error can't find the container with id d0705bf3cdda292edbeee26cd5bd4430c3640ea1bef7bb056bd96f93212f3bc5 Dec 03 10:14:25 crc kubenswrapper[4576]: I1203 10:14:25.997111 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8vjbs/crc-debug-6xl7t" event={"ID":"37d36047-85ec-49ac-a669-c6697fec69a2","Type":"ContainerStarted","Data":"91fd6392d4dc13f4b14280a98893e10b8e3d3444a857b4057cf40e969d852e75"} Dec 03 10:14:25 crc kubenswrapper[4576]: I1203 10:14:25.997759 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8vjbs/crc-debug-6xl7t" event={"ID":"37d36047-85ec-49ac-a669-c6697fec69a2","Type":"ContainerStarted","Data":"d0705bf3cdda292edbeee26cd5bd4430c3640ea1bef7bb056bd96f93212f3bc5"} Dec 03 10:14:26 crc kubenswrapper[4576]: I1203 10:14:26.022236 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-8vjbs/crc-debug-6xl7t" podStartSLOduration=2.022213341 podStartE2EDuration="2.022213341s" podCreationTimestamp="2025-12-03 10:14:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 10:14:26.017514263 +0000 UTC m=+5673.403491257" watchObservedRunningTime="2025-12-03 10:14:26.022213341 +0000 UTC m=+5673.408190335" Dec 03 10:14:28 crc kubenswrapper[4576]: I1203 10:14:28.997347 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-g9xst" Dec 03 10:14:28 crc kubenswrapper[4576]: I1203 10:14:28.999011 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-g9xst" Dec 03 10:14:30 crc kubenswrapper[4576]: I1203 10:14:30.048441 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-g9xst" podUID="0bf694c0-e325-42d7-9ae8-bc1eff85bf41" containerName="registry-server" probeResult="failure" output=< Dec 03 10:14:30 crc kubenswrapper[4576]: timeout: failed to connect service ":50051" within 1s Dec 03 10:14:30 crc kubenswrapper[4576]: > Dec 03 10:14:39 crc kubenswrapper[4576]: I1203 10:14:39.054910 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-g9xst" Dec 03 10:14:39 crc kubenswrapper[4576]: I1203 10:14:39.127060 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-g9xst" Dec 03 10:14:39 crc kubenswrapper[4576]: I1203 10:14:39.681058 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 10:14:39 crc kubenswrapper[4576]: I1203 10:14:39.683385 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" 
podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 10:14:39 crc kubenswrapper[4576]: I1203 10:14:39.689496 4576 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" Dec 03 10:14:39 crc kubenswrapper[4576]: I1203 10:14:39.690062 4576 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3cf7e7748a7af8f7cf693de2b91428d6d2ee2b9f329c964a3b8f5c09e79a6a3c"} pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 10:14:39 crc kubenswrapper[4576]: I1203 10:14:39.690131 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" containerID="cri-o://3cf7e7748a7af8f7cf693de2b91428d6d2ee2b9f329c964a3b8f5c09e79a6a3c" gracePeriod=600 Dec 03 10:14:39 crc kubenswrapper[4576]: I1203 10:14:39.880125 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-g9xst"] Dec 03 10:14:40 crc kubenswrapper[4576]: I1203 10:14:40.193032 4576 generic.go:334] "Generic (PLEG): container finished" podID="60b1bede-26e9-4b5d-b450-9866da685693" containerID="3cf7e7748a7af8f7cf693de2b91428d6d2ee2b9f329c964a3b8f5c09e79a6a3c" exitCode=0 Dec 03 10:14:40 crc kubenswrapper[4576]: I1203 10:14:40.193110 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" event={"ID":"60b1bede-26e9-4b5d-b450-9866da685693","Type":"ContainerDied","Data":"3cf7e7748a7af8f7cf693de2b91428d6d2ee2b9f329c964a3b8f5c09e79a6a3c"} Dec 03 10:14:40 crc kubenswrapper[4576]: I1203 10:14:40.193292 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" event={"ID":"60b1bede-26e9-4b5d-b450-9866da685693","Type":"ContainerStarted","Data":"7702a114a87e2a5a796edf3c4a450662bfd88a4ff9af32af69c6458ff530891a"} Dec 03 10:14:40 crc kubenswrapper[4576]: I1203 10:14:40.193314 4576 scope.go:117] "RemoveContainer" containerID="1eb3d8c30b6286eadddb1522aa392505713e173cbe42e5fd9209c65960d0c2af" Dec 03 10:14:40 crc kubenswrapper[4576]: I1203 10:14:40.193432 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-g9xst" podUID="0bf694c0-e325-42d7-9ae8-bc1eff85bf41" containerName="registry-server" containerID="cri-o://72c6fc8b306a4737f59867e89c7ac201ea7e35ca87a3a90a7271b32ff89b9552" gracePeriod=2 Dec 03 10:14:40 crc kubenswrapper[4576]: I1203 10:14:40.762624 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-g9xst" Dec 03 10:14:40 crc kubenswrapper[4576]: I1203 10:14:40.881363 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fd8xt\" (UniqueName: \"kubernetes.io/projected/0bf694c0-e325-42d7-9ae8-bc1eff85bf41-kube-api-access-fd8xt\") pod \"0bf694c0-e325-42d7-9ae8-bc1eff85bf41\" (UID: \"0bf694c0-e325-42d7-9ae8-bc1eff85bf41\") " Dec 03 10:14:40 crc kubenswrapper[4576]: I1203 10:14:40.881769 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0bf694c0-e325-42d7-9ae8-bc1eff85bf41-catalog-content\") pod \"0bf694c0-e325-42d7-9ae8-bc1eff85bf41\" (UID: \"0bf694c0-e325-42d7-9ae8-bc1eff85bf41\") " Dec 03 10:14:40 crc kubenswrapper[4576]: I1203 10:14:40.881837 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0bf694c0-e325-42d7-9ae8-bc1eff85bf41-utilities\") pod \"0bf694c0-e325-42d7-9ae8-bc1eff85bf41\" (UID: \"0bf694c0-e325-42d7-9ae8-bc1eff85bf41\") " Dec 03 10:14:40 crc kubenswrapper[4576]: I1203 10:14:40.883359 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0bf694c0-e325-42d7-9ae8-bc1eff85bf41-utilities" (OuterVolumeSpecName: "utilities") pod "0bf694c0-e325-42d7-9ae8-bc1eff85bf41" (UID: "0bf694c0-e325-42d7-9ae8-bc1eff85bf41"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 10:14:40 crc kubenswrapper[4576]: I1203 10:14:40.906162 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0bf694c0-e325-42d7-9ae8-bc1eff85bf41-kube-api-access-fd8xt" (OuterVolumeSpecName: "kube-api-access-fd8xt") pod "0bf694c0-e325-42d7-9ae8-bc1eff85bf41" (UID: "0bf694c0-e325-42d7-9ae8-bc1eff85bf41"). InnerVolumeSpecName "kube-api-access-fd8xt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 10:14:40 crc kubenswrapper[4576]: I1203 10:14:40.984240 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fd8xt\" (UniqueName: \"kubernetes.io/projected/0bf694c0-e325-42d7-9ae8-bc1eff85bf41-kube-api-access-fd8xt\") on node \"crc\" DevicePath \"\"" Dec 03 10:14:40 crc kubenswrapper[4576]: I1203 10:14:40.985058 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0bf694c0-e325-42d7-9ae8-bc1eff85bf41-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 10:14:41 crc kubenswrapper[4576]: I1203 10:14:41.048823 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0bf694c0-e325-42d7-9ae8-bc1eff85bf41-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0bf694c0-e325-42d7-9ae8-bc1eff85bf41" (UID: "0bf694c0-e325-42d7-9ae8-bc1eff85bf41"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 10:14:41 crc kubenswrapper[4576]: I1203 10:14:41.087151 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0bf694c0-e325-42d7-9ae8-bc1eff85bf41-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 10:14:41 crc kubenswrapper[4576]: I1203 10:14:41.204016 4576 generic.go:334] "Generic (PLEG): container finished" podID="0bf694c0-e325-42d7-9ae8-bc1eff85bf41" containerID="72c6fc8b306a4737f59867e89c7ac201ea7e35ca87a3a90a7271b32ff89b9552" exitCode=0 Dec 03 10:14:41 crc kubenswrapper[4576]: I1203 10:14:41.204112 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g9xst" event={"ID":"0bf694c0-e325-42d7-9ae8-bc1eff85bf41","Type":"ContainerDied","Data":"72c6fc8b306a4737f59867e89c7ac201ea7e35ca87a3a90a7271b32ff89b9552"} Dec 03 10:14:41 crc kubenswrapper[4576]: I1203 10:14:41.204141 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g9xst" event={"ID":"0bf694c0-e325-42d7-9ae8-bc1eff85bf41","Type":"ContainerDied","Data":"fc1b9af99667d7df02d50bc4d08a5b1941f579f067c8787d1447628ac7828172"} Dec 03 10:14:41 crc kubenswrapper[4576]: I1203 10:14:41.204159 4576 scope.go:117] "RemoveContainer" containerID="72c6fc8b306a4737f59867e89c7ac201ea7e35ca87a3a90a7271b32ff89b9552" Dec 03 10:14:41 crc kubenswrapper[4576]: I1203 10:14:41.205293 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-g9xst" Dec 03 10:14:41 crc kubenswrapper[4576]: I1203 10:14:41.238285 4576 scope.go:117] "RemoveContainer" containerID="ec16b3b0abc3669395796c4f948c5b697b1ed85b9e330c9aa77807fb8261b22c" Dec 03 10:14:41 crc kubenswrapper[4576]: I1203 10:14:41.255050 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-g9xst"] Dec 03 10:14:41 crc kubenswrapper[4576]: I1203 10:14:41.285841 4576 scope.go:117] "RemoveContainer" containerID="d7594ef23c733a5877f77c57754ee37b26a908cfce929ce72b3d54bfe8689a37" Dec 03 10:14:41 crc kubenswrapper[4576]: I1203 10:14:41.293635 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-g9xst"] Dec 03 10:14:41 crc kubenswrapper[4576]: I1203 10:14:41.329656 4576 scope.go:117] "RemoveContainer" containerID="72c6fc8b306a4737f59867e89c7ac201ea7e35ca87a3a90a7271b32ff89b9552" Dec 03 10:14:41 crc kubenswrapper[4576]: E1203 10:14:41.330201 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"72c6fc8b306a4737f59867e89c7ac201ea7e35ca87a3a90a7271b32ff89b9552\": container with ID starting with 72c6fc8b306a4737f59867e89c7ac201ea7e35ca87a3a90a7271b32ff89b9552 not found: ID does not exist" containerID="72c6fc8b306a4737f59867e89c7ac201ea7e35ca87a3a90a7271b32ff89b9552" Dec 03 10:14:41 crc kubenswrapper[4576]: I1203 10:14:41.330238 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72c6fc8b306a4737f59867e89c7ac201ea7e35ca87a3a90a7271b32ff89b9552"} err="failed to get container status \"72c6fc8b306a4737f59867e89c7ac201ea7e35ca87a3a90a7271b32ff89b9552\": rpc error: code = NotFound desc = could not find container \"72c6fc8b306a4737f59867e89c7ac201ea7e35ca87a3a90a7271b32ff89b9552\": container with ID starting with 72c6fc8b306a4737f59867e89c7ac201ea7e35ca87a3a90a7271b32ff89b9552 not found: ID does not exist" Dec 03 10:14:41 crc 
kubenswrapper[4576]: I1203 10:14:41.330264 4576 scope.go:117] "RemoveContainer" containerID="ec16b3b0abc3669395796c4f948c5b697b1ed85b9e330c9aa77807fb8261b22c" Dec 03 10:14:41 crc kubenswrapper[4576]: E1203 10:14:41.330597 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec16b3b0abc3669395796c4f948c5b697b1ed85b9e330c9aa77807fb8261b22c\": container with ID starting with ec16b3b0abc3669395796c4f948c5b697b1ed85b9e330c9aa77807fb8261b22c not found: ID does not exist" containerID="ec16b3b0abc3669395796c4f948c5b697b1ed85b9e330c9aa77807fb8261b22c" Dec 03 10:14:41 crc kubenswrapper[4576]: I1203 10:14:41.330624 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec16b3b0abc3669395796c4f948c5b697b1ed85b9e330c9aa77807fb8261b22c"} err="failed to get container status \"ec16b3b0abc3669395796c4f948c5b697b1ed85b9e330c9aa77807fb8261b22c\": rpc error: code = NotFound desc = could not find container \"ec16b3b0abc3669395796c4f948c5b697b1ed85b9e330c9aa77807fb8261b22c\": container with ID starting with ec16b3b0abc3669395796c4f948c5b697b1ed85b9e330c9aa77807fb8261b22c not found: ID does not exist" Dec 03 10:14:41 crc kubenswrapper[4576]: I1203 10:14:41.330642 4576 scope.go:117] "RemoveContainer" containerID="d7594ef23c733a5877f77c57754ee37b26a908cfce929ce72b3d54bfe8689a37" Dec 03 10:14:41 crc kubenswrapper[4576]: E1203 10:14:41.330926 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d7594ef23c733a5877f77c57754ee37b26a908cfce929ce72b3d54bfe8689a37\": container with ID starting with d7594ef23c733a5877f77c57754ee37b26a908cfce929ce72b3d54bfe8689a37 not found: ID does not exist" containerID="d7594ef23c733a5877f77c57754ee37b26a908cfce929ce72b3d54bfe8689a37" Dec 03 10:14:41 crc kubenswrapper[4576]: I1203 10:14:41.330962 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7594ef23c733a5877f77c57754ee37b26a908cfce929ce72b3d54bfe8689a37"} err="failed to get container status \"d7594ef23c733a5877f77c57754ee37b26a908cfce929ce72b3d54bfe8689a37\": rpc error: code = NotFound desc = could not find container \"d7594ef23c733a5877f77c57754ee37b26a908cfce929ce72b3d54bfe8689a37\": container with ID starting with d7594ef23c733a5877f77c57754ee37b26a908cfce929ce72b3d54bfe8689a37 not found: ID does not exist" Dec 03 10:14:41 crc kubenswrapper[4576]: I1203 10:14:41.688276 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0bf694c0-e325-42d7-9ae8-bc1eff85bf41" path="/var/lib/kubelet/pods/0bf694c0-e325-42d7-9ae8-bc1eff85bf41/volumes" Dec 03 10:15:00 crc kubenswrapper[4576]: I1203 10:15:00.170449 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412615-x7mrg"] Dec 03 10:15:00 crc kubenswrapper[4576]: E1203 10:15:00.173095 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0bf694c0-e325-42d7-9ae8-bc1eff85bf41" containerName="extract-utilities" Dec 03 10:15:00 crc kubenswrapper[4576]: I1203 10:15:00.174406 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="0bf694c0-e325-42d7-9ae8-bc1eff85bf41" containerName="extract-utilities" Dec 03 10:15:00 crc kubenswrapper[4576]: E1203 10:15:00.174576 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0bf694c0-e325-42d7-9ae8-bc1eff85bf41" containerName="registry-server" Dec 03 10:15:00 crc kubenswrapper[4576]: I1203 
10:15:00.174709 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="0bf694c0-e325-42d7-9ae8-bc1eff85bf41" containerName="registry-server" Dec 03 10:15:00 crc kubenswrapper[4576]: E1203 10:15:00.174888 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0bf694c0-e325-42d7-9ae8-bc1eff85bf41" containerName="extract-content" Dec 03 10:15:00 crc kubenswrapper[4576]: I1203 10:15:00.175173 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="0bf694c0-e325-42d7-9ae8-bc1eff85bf41" containerName="extract-content" Dec 03 10:15:00 crc kubenswrapper[4576]: I1203 10:15:00.175688 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="0bf694c0-e325-42d7-9ae8-bc1eff85bf41" containerName="registry-server" Dec 03 10:15:00 crc kubenswrapper[4576]: I1203 10:15:00.177048 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412615-x7mrg" Dec 03 10:15:00 crc kubenswrapper[4576]: I1203 10:15:00.179632 4576 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 03 10:15:00 crc kubenswrapper[4576]: I1203 10:15:00.182128 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 03 10:15:00 crc kubenswrapper[4576]: I1203 10:15:00.184941 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412615-x7mrg"] Dec 03 10:15:00 crc kubenswrapper[4576]: I1203 10:15:00.277116 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/89518269-a0cd-4b34-8534-f2943ef32006-secret-volume\") pod \"collect-profiles-29412615-x7mrg\" (UID: \"89518269-a0cd-4b34-8534-f2943ef32006\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412615-x7mrg" Dec 03 10:15:00 crc kubenswrapper[4576]: I1203 10:15:00.277214 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4h8d\" (UniqueName: \"kubernetes.io/projected/89518269-a0cd-4b34-8534-f2943ef32006-kube-api-access-q4h8d\") pod \"collect-profiles-29412615-x7mrg\" (UID: \"89518269-a0cd-4b34-8534-f2943ef32006\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412615-x7mrg" Dec 03 10:15:00 crc kubenswrapper[4576]: I1203 10:15:00.277350 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/89518269-a0cd-4b34-8534-f2943ef32006-config-volume\") pod \"collect-profiles-29412615-x7mrg\" (UID: \"89518269-a0cd-4b34-8534-f2943ef32006\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412615-x7mrg" Dec 03 10:15:00 crc kubenswrapper[4576]: I1203 10:15:00.379557 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q4h8d\" (UniqueName: \"kubernetes.io/projected/89518269-a0cd-4b34-8534-f2943ef32006-kube-api-access-q4h8d\") pod \"collect-profiles-29412615-x7mrg\" (UID: \"89518269-a0cd-4b34-8534-f2943ef32006\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412615-x7mrg" Dec 03 10:15:00 crc kubenswrapper[4576]: I1203 10:15:00.379946 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/89518269-a0cd-4b34-8534-f2943ef32006-config-volume\") pod \"collect-profiles-29412615-x7mrg\" (UID: \"89518269-a0cd-4b34-8534-f2943ef32006\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412615-x7mrg" Dec 03 10:15:00 crc kubenswrapper[4576]: I1203 10:15:00.380131 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/89518269-a0cd-4b34-8534-f2943ef32006-secret-volume\") pod \"collect-profiles-29412615-x7mrg\" (UID: \"89518269-a0cd-4b34-8534-f2943ef32006\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412615-x7mrg" Dec 03 10:15:00 crc kubenswrapper[4576]: I1203 10:15:00.380752 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/89518269-a0cd-4b34-8534-f2943ef32006-config-volume\") pod \"collect-profiles-29412615-x7mrg\" (UID: \"89518269-a0cd-4b34-8534-f2943ef32006\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412615-x7mrg" Dec 03 10:15:00 crc kubenswrapper[4576]: I1203 10:15:00.395877 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/89518269-a0cd-4b34-8534-f2943ef32006-secret-volume\") pod \"collect-profiles-29412615-x7mrg\" (UID: \"89518269-a0cd-4b34-8534-f2943ef32006\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412615-x7mrg" Dec 03 10:15:00 crc kubenswrapper[4576]: I1203 10:15:00.399311 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q4h8d\" (UniqueName: \"kubernetes.io/projected/89518269-a0cd-4b34-8534-f2943ef32006-kube-api-access-q4h8d\") pod \"collect-profiles-29412615-x7mrg\" (UID: \"89518269-a0cd-4b34-8534-f2943ef32006\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412615-x7mrg" Dec 03 10:15:00 crc kubenswrapper[4576]: I1203 10:15:00.501017 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412615-x7mrg" Dec 03 10:15:00 crc kubenswrapper[4576]: I1203 10:15:00.990439 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412615-x7mrg"] Dec 03 10:15:01 crc kubenswrapper[4576]: I1203 10:15:01.399209 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412615-x7mrg" event={"ID":"89518269-a0cd-4b34-8534-f2943ef32006","Type":"ContainerStarted","Data":"8f051dbb31cdad3155ef6f0ba8483280896c7bda776893686774859c2c95c6c7"} Dec 03 10:15:01 crc kubenswrapper[4576]: I1203 10:15:01.399258 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412615-x7mrg" event={"ID":"89518269-a0cd-4b34-8534-f2943ef32006","Type":"ContainerStarted","Data":"0dfc1c74e2abc1a6d21ebadf943c888c03e2fe57162797f5cf8ca8a60a1d2c2b"} Dec 03 10:15:01 crc kubenswrapper[4576]: I1203 10:15:01.420734 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29412615-x7mrg" podStartSLOduration=1.420709325 podStartE2EDuration="1.420709325s" podCreationTimestamp="2025-12-03 10:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 10:15:01.414163508 +0000 UTC m=+5708.800140502" watchObservedRunningTime="2025-12-03 10:15:01.420709325 +0000 UTC m=+5708.806686319" Dec 03 10:15:02 crc kubenswrapper[4576]: I1203 10:15:02.408379 4576 generic.go:334] "Generic (PLEG): container finished" podID="89518269-a0cd-4b34-8534-f2943ef32006" containerID="8f051dbb31cdad3155ef6f0ba8483280896c7bda776893686774859c2c95c6c7" exitCode=0 Dec 03 10:15:02 crc kubenswrapper[4576]: I1203 10:15:02.408434 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412615-x7mrg" event={"ID":"89518269-a0cd-4b34-8534-f2943ef32006","Type":"ContainerDied","Data":"8f051dbb31cdad3155ef6f0ba8483280896c7bda776893686774859c2c95c6c7"} Dec 03 10:15:03 crc kubenswrapper[4576]: I1203 10:15:03.777790 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412615-x7mrg" Dec 03 10:15:03 crc kubenswrapper[4576]: I1203 10:15:03.847688 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q4h8d\" (UniqueName: \"kubernetes.io/projected/89518269-a0cd-4b34-8534-f2943ef32006-kube-api-access-q4h8d\") pod \"89518269-a0cd-4b34-8534-f2943ef32006\" (UID: \"89518269-a0cd-4b34-8534-f2943ef32006\") " Dec 03 10:15:03 crc kubenswrapper[4576]: I1203 10:15:03.847764 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/89518269-a0cd-4b34-8534-f2943ef32006-config-volume\") pod \"89518269-a0cd-4b34-8534-f2943ef32006\" (UID: \"89518269-a0cd-4b34-8534-f2943ef32006\") " Dec 03 10:15:03 crc kubenswrapper[4576]: I1203 10:15:03.847909 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/89518269-a0cd-4b34-8534-f2943ef32006-secret-volume\") pod \"89518269-a0cd-4b34-8534-f2943ef32006\" (UID: \"89518269-a0cd-4b34-8534-f2943ef32006\") " Dec 03 10:15:03 crc kubenswrapper[4576]: I1203 10:15:03.849541 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/89518269-a0cd-4b34-8534-f2943ef32006-config-volume" (OuterVolumeSpecName: "config-volume") pod "89518269-a0cd-4b34-8534-f2943ef32006" (UID: "89518269-a0cd-4b34-8534-f2943ef32006"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 10:15:03 crc kubenswrapper[4576]: I1203 10:15:03.856696 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89518269-a0cd-4b34-8534-f2943ef32006-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "89518269-a0cd-4b34-8534-f2943ef32006" (UID: "89518269-a0cd-4b34-8534-f2943ef32006"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 10:15:03 crc kubenswrapper[4576]: I1203 10:15:03.859752 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89518269-a0cd-4b34-8534-f2943ef32006-kube-api-access-q4h8d" (OuterVolumeSpecName: "kube-api-access-q4h8d") pod "89518269-a0cd-4b34-8534-f2943ef32006" (UID: "89518269-a0cd-4b34-8534-f2943ef32006"). InnerVolumeSpecName "kube-api-access-q4h8d". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 10:15:03 crc kubenswrapper[4576]: I1203 10:15:03.950263 4576 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/89518269-a0cd-4b34-8534-f2943ef32006-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 10:15:03 crc kubenswrapper[4576]: I1203 10:15:03.950315 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q4h8d\" (UniqueName: \"kubernetes.io/projected/89518269-a0cd-4b34-8534-f2943ef32006-kube-api-access-q4h8d\") on node \"crc\" DevicePath \"\"" Dec 03 10:15:03 crc kubenswrapper[4576]: I1203 10:15:03.950328 4576 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/89518269-a0cd-4b34-8534-f2943ef32006-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 10:15:04 crc kubenswrapper[4576]: I1203 10:15:04.429943 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412615-x7mrg" event={"ID":"89518269-a0cd-4b34-8534-f2943ef32006","Type":"ContainerDied","Data":"0dfc1c74e2abc1a6d21ebadf943c888c03e2fe57162797f5cf8ca8a60a1d2c2b"} Dec 03 10:15:04 crc kubenswrapper[4576]: I1203 10:15:04.429987 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0dfc1c74e2abc1a6d21ebadf943c888c03e2fe57162797f5cf8ca8a60a1d2c2b" Dec 03 10:15:04 crc kubenswrapper[4576]: I1203 10:15:04.430001 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412615-x7mrg" Dec 03 10:15:04 crc kubenswrapper[4576]: I1203 10:15:04.503011 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412570-b6499"] Dec 03 10:15:04 crc kubenswrapper[4576]: I1203 10:15:04.512817 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412570-b6499"] Dec 03 10:15:05 crc kubenswrapper[4576]: I1203 10:15:05.688582 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c374ea3b-7842-47d4-b6ca-2462ce18ce46" path="/var/lib/kubelet/pods/c374ea3b-7842-47d4-b6ca-2462ce18ce46/volumes" Dec 03 10:15:11 crc kubenswrapper[4576]: I1203 10:15:11.486012 4576 generic.go:334] "Generic (PLEG): container finished" podID="37d36047-85ec-49ac-a669-c6697fec69a2" containerID="91fd6392d4dc13f4b14280a98893e10b8e3d3444a857b4057cf40e969d852e75" exitCode=0 Dec 03 10:15:11 crc kubenswrapper[4576]: I1203 10:15:11.486592 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8vjbs/crc-debug-6xl7t" event={"ID":"37d36047-85ec-49ac-a669-c6697fec69a2","Type":"ContainerDied","Data":"91fd6392d4dc13f4b14280a98893e10b8e3d3444a857b4057cf40e969d852e75"} Dec 03 10:15:12 crc kubenswrapper[4576]: I1203 10:15:12.642460 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-8vjbs/crc-debug-6xl7t" Dec 03 10:15:12 crc kubenswrapper[4576]: I1203 10:15:12.683329 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-8vjbs/crc-debug-6xl7t"] Dec 03 10:15:12 crc kubenswrapper[4576]: I1203 10:15:12.692676 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-8vjbs/crc-debug-6xl7t"] Dec 03 10:15:12 crc kubenswrapper[4576]: I1203 10:15:12.729068 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7hb84\" (UniqueName: \"kubernetes.io/projected/37d36047-85ec-49ac-a669-c6697fec69a2-kube-api-access-7hb84\") pod \"37d36047-85ec-49ac-a669-c6697fec69a2\" (UID: \"37d36047-85ec-49ac-a669-c6697fec69a2\") " Dec 03 10:15:12 crc kubenswrapper[4576]: I1203 10:15:12.729389 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/37d36047-85ec-49ac-a669-c6697fec69a2-host\") pod \"37d36047-85ec-49ac-a669-c6697fec69a2\" (UID: \"37d36047-85ec-49ac-a669-c6697fec69a2\") " Dec 03 10:15:12 crc kubenswrapper[4576]: I1203 10:15:12.729627 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/37d36047-85ec-49ac-a669-c6697fec69a2-host" (OuterVolumeSpecName: "host") pod "37d36047-85ec-49ac-a669-c6697fec69a2" (UID: "37d36047-85ec-49ac-a669-c6697fec69a2"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 10:15:12 crc kubenswrapper[4576]: I1203 10:15:12.735074 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37d36047-85ec-49ac-a669-c6697fec69a2-kube-api-access-7hb84" (OuterVolumeSpecName: "kube-api-access-7hb84") pod "37d36047-85ec-49ac-a669-c6697fec69a2" (UID: "37d36047-85ec-49ac-a669-c6697fec69a2"). InnerVolumeSpecName "kube-api-access-7hb84". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 10:15:12 crc kubenswrapper[4576]: I1203 10:15:12.832929 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7hb84\" (UniqueName: \"kubernetes.io/projected/37d36047-85ec-49ac-a669-c6697fec69a2-kube-api-access-7hb84\") on node \"crc\" DevicePath \"\"" Dec 03 10:15:12 crc kubenswrapper[4576]: I1203 10:15:12.832974 4576 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/37d36047-85ec-49ac-a669-c6697fec69a2-host\") on node \"crc\" DevicePath \"\"" Dec 03 10:15:13 crc kubenswrapper[4576]: I1203 10:15:13.516408 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d0705bf3cdda292edbeee26cd5bd4430c3640ea1bef7bb056bd96f93212f3bc5" Dec 03 10:15:13 crc kubenswrapper[4576]: I1203 10:15:13.516495 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-8vjbs/crc-debug-6xl7t" Dec 03 10:15:13 crc kubenswrapper[4576]: I1203 10:15:13.687785 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="37d36047-85ec-49ac-a669-c6697fec69a2" path="/var/lib/kubelet/pods/37d36047-85ec-49ac-a669-c6697fec69a2/volumes" Dec 03 10:15:13 crc kubenswrapper[4576]: I1203 10:15:13.904700 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-8vjbs/crc-debug-8lfcg"] Dec 03 10:15:13 crc kubenswrapper[4576]: E1203 10:15:13.905101 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37d36047-85ec-49ac-a669-c6697fec69a2" containerName="container-00" Dec 03 10:15:13 crc kubenswrapper[4576]: I1203 10:15:13.905118 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="37d36047-85ec-49ac-a669-c6697fec69a2" containerName="container-00" Dec 03 10:15:13 crc kubenswrapper[4576]: E1203 10:15:13.905153 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89518269-a0cd-4b34-8534-f2943ef32006" containerName="collect-profiles" Dec 03 10:15:13 crc kubenswrapper[4576]: I1203 10:15:13.905160 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="89518269-a0cd-4b34-8534-f2943ef32006" containerName="collect-profiles" Dec 03 10:15:13 crc kubenswrapper[4576]: I1203 10:15:13.905322 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="37d36047-85ec-49ac-a669-c6697fec69a2" containerName="container-00" Dec 03 10:15:13 crc kubenswrapper[4576]: I1203 10:15:13.905345 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="89518269-a0cd-4b34-8534-f2943ef32006" containerName="collect-profiles" Dec 03 10:15:13 crc kubenswrapper[4576]: I1203 10:15:13.906019 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-8vjbs/crc-debug-8lfcg" Dec 03 10:15:13 crc kubenswrapper[4576]: I1203 10:15:13.908025 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-8vjbs"/"default-dockercfg-nzt8r" Dec 03 10:15:13 crc kubenswrapper[4576]: I1203 10:15:13.955506 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/594457cb-a00d-4d27-9bd5-f420001d2608-host\") pod \"crc-debug-8lfcg\" (UID: \"594457cb-a00d-4d27-9bd5-f420001d2608\") " pod="openshift-must-gather-8vjbs/crc-debug-8lfcg" Dec 03 10:15:13 crc kubenswrapper[4576]: I1203 10:15:13.955894 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7hml\" (UniqueName: \"kubernetes.io/projected/594457cb-a00d-4d27-9bd5-f420001d2608-kube-api-access-g7hml\") pod \"crc-debug-8lfcg\" (UID: \"594457cb-a00d-4d27-9bd5-f420001d2608\") " pod="openshift-must-gather-8vjbs/crc-debug-8lfcg" Dec 03 10:15:14 crc kubenswrapper[4576]: I1203 10:15:14.057893 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/594457cb-a00d-4d27-9bd5-f420001d2608-host\") pod \"crc-debug-8lfcg\" (UID: \"594457cb-a00d-4d27-9bd5-f420001d2608\") " pod="openshift-must-gather-8vjbs/crc-debug-8lfcg" Dec 03 10:15:14 crc kubenswrapper[4576]: I1203 10:15:14.058015 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7hml\" (UniqueName: \"kubernetes.io/projected/594457cb-a00d-4d27-9bd5-f420001d2608-kube-api-access-g7hml\") pod \"crc-debug-8lfcg\" (UID: \"594457cb-a00d-4d27-9bd5-f420001d2608\") " pod="openshift-must-gather-8vjbs/crc-debug-8lfcg" Dec 03 10:15:14 crc kubenswrapper[4576]: I1203 10:15:14.058278 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/594457cb-a00d-4d27-9bd5-f420001d2608-host\") pod \"crc-debug-8lfcg\" (UID: \"594457cb-a00d-4d27-9bd5-f420001d2608\") " pod="openshift-must-gather-8vjbs/crc-debug-8lfcg" Dec 03 10:15:14 crc kubenswrapper[4576]: I1203 10:15:14.074304 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7hml\" (UniqueName: \"kubernetes.io/projected/594457cb-a00d-4d27-9bd5-f420001d2608-kube-api-access-g7hml\") pod \"crc-debug-8lfcg\" (UID: \"594457cb-a00d-4d27-9bd5-f420001d2608\") " pod="openshift-must-gather-8vjbs/crc-debug-8lfcg" Dec 03 10:15:14 crc kubenswrapper[4576]: I1203 10:15:14.226487 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-8vjbs/crc-debug-8lfcg" Dec 03 10:15:14 crc kubenswrapper[4576]: I1203 10:15:14.526096 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8vjbs/crc-debug-8lfcg" event={"ID":"594457cb-a00d-4d27-9bd5-f420001d2608","Type":"ContainerStarted","Data":"f38a90c5d0badddf3862f2449c0df8497e483848689bd37718992b1e094367c4"} Dec 03 10:15:14 crc kubenswrapper[4576]: I1203 10:15:14.526469 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8vjbs/crc-debug-8lfcg" event={"ID":"594457cb-a00d-4d27-9bd5-f420001d2608","Type":"ContainerStarted","Data":"af08fc21c2c26afb96634a232f241e3238eae797edb90222f89cdb46689b02e1"} Dec 03 10:15:14 crc kubenswrapper[4576]: I1203 10:15:14.546919 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-8vjbs/crc-debug-8lfcg" podStartSLOduration=1.546898499 podStartE2EDuration="1.546898499s" podCreationTimestamp="2025-12-03 10:15:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 10:15:14.540067864 +0000 UTC m=+5721.926044848" watchObservedRunningTime="2025-12-03 10:15:14.546898499 +0000 UTC m=+5721.932875483" Dec 03 10:15:15 crc kubenswrapper[4576]: I1203 10:15:15.553003 4576 generic.go:334] "Generic (PLEG): container finished" podID="594457cb-a00d-4d27-9bd5-f420001d2608" containerID="f38a90c5d0badddf3862f2449c0df8497e483848689bd37718992b1e094367c4" exitCode=0 Dec 03 10:15:15 crc kubenswrapper[4576]: I1203 10:15:15.553345 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8vjbs/crc-debug-8lfcg" event={"ID":"594457cb-a00d-4d27-9bd5-f420001d2608","Type":"ContainerDied","Data":"f38a90c5d0badddf3862f2449c0df8497e483848689bd37718992b1e094367c4"} Dec 03 10:15:16 crc kubenswrapper[4576]: I1203 10:15:16.732958 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-8vjbs/crc-debug-8lfcg" Dec 03 10:15:16 crc kubenswrapper[4576]: I1203 10:15:16.818266 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g7hml\" (UniqueName: \"kubernetes.io/projected/594457cb-a00d-4d27-9bd5-f420001d2608-kube-api-access-g7hml\") pod \"594457cb-a00d-4d27-9bd5-f420001d2608\" (UID: \"594457cb-a00d-4d27-9bd5-f420001d2608\") " Dec 03 10:15:16 crc kubenswrapper[4576]: I1203 10:15:16.818426 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/594457cb-a00d-4d27-9bd5-f420001d2608-host\") pod \"594457cb-a00d-4d27-9bd5-f420001d2608\" (UID: \"594457cb-a00d-4d27-9bd5-f420001d2608\") " Dec 03 10:15:16 crc kubenswrapper[4576]: I1203 10:15:16.819108 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/594457cb-a00d-4d27-9bd5-f420001d2608-host" (OuterVolumeSpecName: "host") pod "594457cb-a00d-4d27-9bd5-f420001d2608" (UID: "594457cb-a00d-4d27-9bd5-f420001d2608"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 10:15:16 crc kubenswrapper[4576]: I1203 10:15:16.826826 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/594457cb-a00d-4d27-9bd5-f420001d2608-kube-api-access-g7hml" (OuterVolumeSpecName: "kube-api-access-g7hml") pod "594457cb-a00d-4d27-9bd5-f420001d2608" (UID: "594457cb-a00d-4d27-9bd5-f420001d2608"). 
InnerVolumeSpecName "kube-api-access-g7hml". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 10:15:16 crc kubenswrapper[4576]: I1203 10:15:16.928581 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g7hml\" (UniqueName: \"kubernetes.io/projected/594457cb-a00d-4d27-9bd5-f420001d2608-kube-api-access-g7hml\") on node \"crc\" DevicePath \"\"" Dec 03 10:15:16 crc kubenswrapper[4576]: I1203 10:15:16.933054 4576 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/594457cb-a00d-4d27-9bd5-f420001d2608-host\") on node \"crc\" DevicePath \"\"" Dec 03 10:15:16 crc kubenswrapper[4576]: I1203 10:15:16.959753 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-8vjbs/crc-debug-8lfcg"] Dec 03 10:15:16 crc kubenswrapper[4576]: I1203 10:15:16.976677 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-8vjbs/crc-debug-8lfcg"] Dec 03 10:15:17 crc kubenswrapper[4576]: I1203 10:15:17.574848 4576 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="af08fc21c2c26afb96634a232f241e3238eae797edb90222f89cdb46689b02e1" Dec 03 10:15:17 crc kubenswrapper[4576]: I1203 10:15:17.575296 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-8vjbs/crc-debug-8lfcg" Dec 03 10:15:17 crc kubenswrapper[4576]: I1203 10:15:17.687893 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="594457cb-a00d-4d27-9bd5-f420001d2608" path="/var/lib/kubelet/pods/594457cb-a00d-4d27-9bd5-f420001d2608/volumes" Dec 03 10:15:18 crc kubenswrapper[4576]: I1203 10:15:18.194206 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-8vjbs/crc-debug-crxt8"] Dec 03 10:15:18 crc kubenswrapper[4576]: E1203 10:15:18.195699 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="594457cb-a00d-4d27-9bd5-f420001d2608" containerName="container-00" Dec 03 10:15:18 crc kubenswrapper[4576]: I1203 10:15:18.195784 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="594457cb-a00d-4d27-9bd5-f420001d2608" containerName="container-00" Dec 03 10:15:18 crc kubenswrapper[4576]: I1203 10:15:18.196059 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="594457cb-a00d-4d27-9bd5-f420001d2608" containerName="container-00" Dec 03 10:15:18 crc kubenswrapper[4576]: I1203 10:15:18.196756 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-8vjbs/crc-debug-crxt8" Dec 03 10:15:18 crc kubenswrapper[4576]: I1203 10:15:18.200133 4576 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-8vjbs"/"default-dockercfg-nzt8r" Dec 03 10:15:18 crc kubenswrapper[4576]: I1203 10:15:18.280743 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-47vp6\" (UniqueName: \"kubernetes.io/projected/b3b7c679-bb03-44e7-b3bb-583603f506f5-kube-api-access-47vp6\") pod \"crc-debug-crxt8\" (UID: \"b3b7c679-bb03-44e7-b3bb-583603f506f5\") " pod="openshift-must-gather-8vjbs/crc-debug-crxt8" Dec 03 10:15:18 crc kubenswrapper[4576]: I1203 10:15:18.280871 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b3b7c679-bb03-44e7-b3bb-583603f506f5-host\") pod \"crc-debug-crxt8\" (UID: \"b3b7c679-bb03-44e7-b3bb-583603f506f5\") " pod="openshift-must-gather-8vjbs/crc-debug-crxt8" Dec 03 10:15:18 crc kubenswrapper[4576]: I1203 10:15:18.382645 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-47vp6\" (UniqueName: \"kubernetes.io/projected/b3b7c679-bb03-44e7-b3bb-583603f506f5-kube-api-access-47vp6\") pod \"crc-debug-crxt8\" (UID: \"b3b7c679-bb03-44e7-b3bb-583603f506f5\") " pod="openshift-must-gather-8vjbs/crc-debug-crxt8" Dec 03 10:15:18 crc kubenswrapper[4576]: I1203 10:15:18.382991 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b3b7c679-bb03-44e7-b3bb-583603f506f5-host\") pod \"crc-debug-crxt8\" (UID: \"b3b7c679-bb03-44e7-b3bb-583603f506f5\") " pod="openshift-must-gather-8vjbs/crc-debug-crxt8" Dec 03 10:15:18 crc kubenswrapper[4576]: I1203 10:15:18.383123 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b3b7c679-bb03-44e7-b3bb-583603f506f5-host\") pod \"crc-debug-crxt8\" (UID: \"b3b7c679-bb03-44e7-b3bb-583603f506f5\") " pod="openshift-must-gather-8vjbs/crc-debug-crxt8" Dec 03 10:15:18 crc kubenswrapper[4576]: I1203 10:15:18.400598 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-47vp6\" (UniqueName: \"kubernetes.io/projected/b3b7c679-bb03-44e7-b3bb-583603f506f5-kube-api-access-47vp6\") pod \"crc-debug-crxt8\" (UID: \"b3b7c679-bb03-44e7-b3bb-583603f506f5\") " pod="openshift-must-gather-8vjbs/crc-debug-crxt8" Dec 03 10:15:18 crc kubenswrapper[4576]: I1203 10:15:18.512212 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-8vjbs/crc-debug-crxt8" Dec 03 10:15:18 crc kubenswrapper[4576]: W1203 10:15:18.553407 4576 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb3b7c679_bb03_44e7_b3bb_583603f506f5.slice/crio-05dc4c6552002aa49f37c5f46ab01be5ea073bb160ea2f5b1bda609d2d7d6c5c WatchSource:0}: Error finding container 05dc4c6552002aa49f37c5f46ab01be5ea073bb160ea2f5b1bda609d2d7d6c5c: Status 404 returned error can't find the container with id 05dc4c6552002aa49f37c5f46ab01be5ea073bb160ea2f5b1bda609d2d7d6c5c Dec 03 10:15:18 crc kubenswrapper[4576]: I1203 10:15:18.585232 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8vjbs/crc-debug-crxt8" event={"ID":"b3b7c679-bb03-44e7-b3bb-583603f506f5","Type":"ContainerStarted","Data":"05dc4c6552002aa49f37c5f46ab01be5ea073bb160ea2f5b1bda609d2d7d6c5c"} Dec 03 10:15:19 crc kubenswrapper[4576]: I1203 10:15:19.595135 4576 generic.go:334] "Generic (PLEG): container finished" podID="b3b7c679-bb03-44e7-b3bb-583603f506f5" containerID="146629e2c2eb23797a882bb58b93332899e2f65ff537f24df72b6e1e19e10295" exitCode=0 Dec 03 10:15:19 crc kubenswrapper[4576]: I1203 10:15:19.595199 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8vjbs/crc-debug-crxt8" event={"ID":"b3b7c679-bb03-44e7-b3bb-583603f506f5","Type":"ContainerDied","Data":"146629e2c2eb23797a882bb58b93332899e2f65ff537f24df72b6e1e19e10295"} Dec 03 10:15:19 crc kubenswrapper[4576]: I1203 10:15:19.639405 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-8vjbs/crc-debug-crxt8"] Dec 03 10:15:19 crc kubenswrapper[4576]: I1203 10:15:19.649788 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-8vjbs/crc-debug-crxt8"] Dec 03 10:15:20 crc kubenswrapper[4576]: I1203 10:15:20.699056 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-8vjbs/crc-debug-crxt8" Dec 03 10:15:20 crc kubenswrapper[4576]: I1203 10:15:20.825594 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-47vp6\" (UniqueName: \"kubernetes.io/projected/b3b7c679-bb03-44e7-b3bb-583603f506f5-kube-api-access-47vp6\") pod \"b3b7c679-bb03-44e7-b3bb-583603f506f5\" (UID: \"b3b7c679-bb03-44e7-b3bb-583603f506f5\") " Dec 03 10:15:20 crc kubenswrapper[4576]: I1203 10:15:20.825967 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b3b7c679-bb03-44e7-b3bb-583603f506f5-host\") pod \"b3b7c679-bb03-44e7-b3bb-583603f506f5\" (UID: \"b3b7c679-bb03-44e7-b3bb-583603f506f5\") " Dec 03 10:15:20 crc kubenswrapper[4576]: I1203 10:15:20.826781 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b3b7c679-bb03-44e7-b3bb-583603f506f5-host" (OuterVolumeSpecName: "host") pod "b3b7c679-bb03-44e7-b3bb-583603f506f5" (UID: "b3b7c679-bb03-44e7-b3bb-583603f506f5"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 10:15:20 crc kubenswrapper[4576]: I1203 10:15:20.842463 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3b7c679-bb03-44e7-b3bb-583603f506f5-kube-api-access-47vp6" (OuterVolumeSpecName: "kube-api-access-47vp6") pod "b3b7c679-bb03-44e7-b3bb-583603f506f5" (UID: "b3b7c679-bb03-44e7-b3bb-583603f506f5"). 
InnerVolumeSpecName "kube-api-access-47vp6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 10:15:20 crc kubenswrapper[4576]: I1203 10:15:20.928365 4576 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b3b7c679-bb03-44e7-b3bb-583603f506f5-host\") on node \"crc\" DevicePath \"\"" Dec 03 10:15:20 crc kubenswrapper[4576]: I1203 10:15:20.928394 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-47vp6\" (UniqueName: \"kubernetes.io/projected/b3b7c679-bb03-44e7-b3bb-583603f506f5-kube-api-access-47vp6\") on node \"crc\" DevicePath \"\"" Dec 03 10:15:21 crc kubenswrapper[4576]: I1203 10:15:21.614396 4576 scope.go:117] "RemoveContainer" containerID="146629e2c2eb23797a882bb58b93332899e2f65ff537f24df72b6e1e19e10295" Dec 03 10:15:21 crc kubenswrapper[4576]: I1203 10:15:21.614712 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-8vjbs/crc-debug-crxt8" Dec 03 10:15:21 crc kubenswrapper[4576]: I1203 10:15:21.702231 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3b7c679-bb03-44e7-b3bb-583603f506f5" path="/var/lib/kubelet/pods/b3b7c679-bb03-44e7-b3bb-583603f506f5/volumes" Dec 03 10:15:54 crc kubenswrapper[4576]: I1203 10:15:54.131677 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-679c878b58-l5t4t_c3348234-cbbe-464e-b7dd-493151ce96ef/barbican-api/0.log" Dec 03 10:15:54 crc kubenswrapper[4576]: I1203 10:15:54.360260 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-679c878b58-l5t4t_c3348234-cbbe-464e-b7dd-493151ce96ef/barbican-api-log/0.log" Dec 03 10:15:54 crc kubenswrapper[4576]: I1203 10:15:54.449728 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-9f4d5dcf8-5lvz8_14ee4879-18ef-4d7f-956f-03297ff160fe/barbican-keystone-listener/0.log" Dec 03 10:15:54 crc kubenswrapper[4576]: I1203 10:15:54.656181 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-9f4d5dcf8-5lvz8_14ee4879-18ef-4d7f-956f-03297ff160fe/barbican-keystone-listener-log/0.log" Dec 03 10:15:54 crc kubenswrapper[4576]: I1203 10:15:54.710133 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-7678fb6bf-m8m4k_e8e42590-8fdb-4c05-a5e1-e2cddbeb0731/barbican-worker/0.log" Dec 03 10:15:54 crc kubenswrapper[4576]: I1203 10:15:54.726574 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-7678fb6bf-m8m4k_e8e42590-8fdb-4c05-a5e1-e2cddbeb0731/barbican-worker-log/0.log" Dec 03 10:15:54 crc kubenswrapper[4576]: I1203 10:15:54.949906 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-pd2lv_a1b39b7c-1acb-467a-904c-7ee77350804b/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 10:15:55 crc kubenswrapper[4576]: I1203 10:15:55.118036 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_cba79dc3-6e8e-465e-a3f9-9e2fd67972af/ceilometer-central-agent/0.log" Dec 03 10:15:55 crc kubenswrapper[4576]: I1203 10:15:55.221161 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_cba79dc3-6e8e-465e-a3f9-9e2fd67972af/proxy-httpd/0.log" Dec 03 10:15:55 crc kubenswrapper[4576]: I1203 10:15:55.350820 4576 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ceilometer-0_cba79dc3-6e8e-465e-a3f9-9e2fd67972af/ceilometer-notification-agent/0.log" Dec 03 10:15:55 crc kubenswrapper[4576]: I1203 10:15:55.464643 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_cba79dc3-6e8e-465e-a3f9-9e2fd67972af/sg-core/0.log" Dec 03 10:15:55 crc kubenswrapper[4576]: I1203 10:15:55.519471 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_9ee570a8-cd83-4db6-bffa-080a2dae8552/cinder-api/0.log" Dec 03 10:15:55 crc kubenswrapper[4576]: I1203 10:15:55.592477 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_9ee570a8-cd83-4db6-bffa-080a2dae8552/cinder-api-log/0.log" Dec 03 10:15:55 crc kubenswrapper[4576]: I1203 10:15:55.893076 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_11c5d059-87c8-4fe8-ad1b-e50f1b029e8b/probe/0.log" Dec 03 10:15:55 crc kubenswrapper[4576]: I1203 10:15:55.907761 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_11c5d059-87c8-4fe8-ad1b-e50f1b029e8b/cinder-scheduler/0.log" Dec 03 10:15:56 crc kubenswrapper[4576]: I1203 10:15:56.135721 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-sdlvm_308d4747-e427-4848-8961-a21d39dbd449/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 10:15:56 crc kubenswrapper[4576]: I1203 10:15:56.208184 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-q8xlq_be59e37f-89da-4b5c-9126-9fd6fe4d9ec8/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 10:15:56 crc kubenswrapper[4576]: I1203 10:15:56.323991 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-67cb876dc9-z42kz_9e70ca50-713f-40fd-ac9a-89af89af62ba/init/0.log" Dec 03 10:15:56 crc kubenswrapper[4576]: I1203 10:15:56.555117 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-67cb876dc9-z42kz_9e70ca50-713f-40fd-ac9a-89af89af62ba/init/0.log" Dec 03 10:15:56 crc kubenswrapper[4576]: I1203 10:15:56.663779 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-hnxl7_fc6a72d3-31b7-461d-82f3-09536b77f9e6/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 10:15:56 crc kubenswrapper[4576]: I1203 10:15:56.797567 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-67cb876dc9-z42kz_9e70ca50-713f-40fd-ac9a-89af89af62ba/dnsmasq-dns/0.log" Dec 03 10:15:56 crc kubenswrapper[4576]: I1203 10:15:56.964031 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_ebb315c8-6124-44ed-8bb7-f82b228893e2/glance-httpd/0.log" Dec 03 10:15:57 crc kubenswrapper[4576]: I1203 10:15:57.060774 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_ebb315c8-6124-44ed-8bb7-f82b228893e2/glance-log/0.log" Dec 03 10:15:57 crc kubenswrapper[4576]: I1203 10:15:57.471445 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_ad4e4d0b-1390-463b-9337-3b3d8f6ca758/glance-log/0.log" Dec 03 10:15:57 crc kubenswrapper[4576]: I1203 10:15:57.526436 4576 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_glance-default-internal-api-0_ad4e4d0b-1390-463b-9337-3b3d8f6ca758/glance-httpd/0.log" Dec 03 10:15:57 crc kubenswrapper[4576]: I1203 10:15:57.735877 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-6d649695d8-6rtxn_288ed488-5270-4966-b866-f9f015262989/horizon/1.log" Dec 03 10:15:57 crc kubenswrapper[4576]: I1203 10:15:57.833561 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-6d649695d8-6rtxn_288ed488-5270-4966-b866-f9f015262989/horizon/0.log" Dec 03 10:15:58 crc kubenswrapper[4576]: I1203 10:15:58.161178 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-j6249_55ad3ecf-5525-4292-b4e9-98456a2dc903/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 10:15:58 crc kubenswrapper[4576]: I1203 10:15:58.280673 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-6d649695d8-6rtxn_288ed488-5270-4966-b866-f9f015262989/horizon-log/0.log" Dec 03 10:15:58 crc kubenswrapper[4576]: I1203 10:15:58.371670 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-2vcbc_71120894-497f-438e-a42f-f3e6fd50d2de/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 10:15:58 crc kubenswrapper[4576]: I1203 10:15:58.525155 4576 scope.go:117] "RemoveContainer" containerID="75df304071699a1d95ca8bb581e66e6f0d5c9e02d587bcae2a02be47e7c40d61" Dec 03 10:15:58 crc kubenswrapper[4576]: I1203 10:15:58.635598 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29412601-4r4n5_ccd3075b-2364-4539-b6c1-046722becd51/keystone-cron/0.log" Dec 03 10:15:58 crc kubenswrapper[4576]: I1203 10:15:58.990799 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_6b92a9ba-d2ef-48e7-8efe-a8cc1c31b6ba/kube-state-metrics/0.log" Dec 03 10:15:59 crc kubenswrapper[4576]: I1203 10:15:59.078058 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-69657bfb7-ncr7l_ffc7abfc-aecf-42de-8947-143cd7bda142/keystone-api/0.log" Dec 03 10:15:59 crc kubenswrapper[4576]: I1203 10:15:59.201769 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-sdtz9_af426bee-00a4-4c61-be68-87719bd4f285/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 10:15:59 crc kubenswrapper[4576]: I1203 10:15:59.798025 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-ksgl5_94db1892-a7dd-4a07-b181-fa2fbcffe2fc/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 10:15:59 crc kubenswrapper[4576]: I1203 10:15:59.831599 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6f978d8b99-7d88p_cdfca795-30e9-4534-9084-e34e01ab71ae/neutron-httpd/0.log" Dec 03 10:16:00 crc kubenswrapper[4576]: I1203 10:16:00.077703 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6f978d8b99-7d88p_cdfca795-30e9-4534-9084-e34e01ab71ae/neutron-api/0.log" Dec 03 10:16:00 crc kubenswrapper[4576]: I1203 10:16:00.902826 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_2ce21985-e2a9-48a8-bdca-ad4dc248ff98/nova-cell0-conductor-conductor/0.log" Dec 03 10:16:01 crc kubenswrapper[4576]: I1203 10:16:01.012630 4576 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_nova-cell1-conductor-0_2be87e85-f004-483d-9faf-4f1dd3a050bd/nova-cell1-conductor-conductor/0.log" Dec 03 10:16:01 crc kubenswrapper[4576]: I1203 10:16:01.435039 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_b02586c5-f716-48ea-bc4a-fefa33df684e/nova-api-log/0.log" Dec 03 10:16:01 crc kubenswrapper[4576]: I1203 10:16:01.575844 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_69cd46ae-0738-4d84-87a3-077751519dc4/nova-cell1-novncproxy-novncproxy/0.log" Dec 03 10:16:01 crc kubenswrapper[4576]: I1203 10:16:01.851862 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-649lx_c7424b9a-5544-49a0-af69-fc3d308bf468/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 10:16:01 crc kubenswrapper[4576]: I1203 10:16:01.929023 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_8dddc23a-4179-44b0-b145-a91ab3441703/nova-metadata-log/0.log" Dec 03 10:16:01 crc kubenswrapper[4576]: I1203 10:16:01.979577 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_b02586c5-f716-48ea-bc4a-fefa33df684e/nova-api-api/0.log" Dec 03 10:16:02 crc kubenswrapper[4576]: I1203 10:16:02.600784 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_83513275-c7cc-450e-9bca-79ca295b7906/mysql-bootstrap/0.log" Dec 03 10:16:02 crc kubenswrapper[4576]: I1203 10:16:02.670781 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_60431290-c470-429d-938a-588668bb2887/nova-scheduler-scheduler/0.log" Dec 03 10:16:03 crc kubenswrapper[4576]: I1203 10:16:03.048797 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_83513275-c7cc-450e-9bca-79ca295b7906/galera/0.log" Dec 03 10:16:03 crc kubenswrapper[4576]: I1203 10:16:03.068083 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_83513275-c7cc-450e-9bca-79ca295b7906/mysql-bootstrap/0.log" Dec 03 10:16:03 crc kubenswrapper[4576]: I1203 10:16:03.350899 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_288e65b5-6608-4063-9996-eb5180ffbf0e/mysql-bootstrap/0.log" Dec 03 10:16:03 crc kubenswrapper[4576]: I1203 10:16:03.530335 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_288e65b5-6608-4063-9996-eb5180ffbf0e/mysql-bootstrap/0.log" Dec 03 10:16:03 crc kubenswrapper[4576]: I1203 10:16:03.633213 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_288e65b5-6608-4063-9996-eb5180ffbf0e/galera/0.log" Dec 03 10:16:03 crc kubenswrapper[4576]: I1203 10:16:03.859091 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_649a142a-4649-45fb-bdba-11fcc838bf97/openstackclient/0.log" Dec 03 10:16:03 crc kubenswrapper[4576]: I1203 10:16:03.997017 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-25phb_1f709485-8dc6-4e99-ba88-880d491fca2e/ovn-controller/0.log" Dec 03 10:16:04 crc kubenswrapper[4576]: I1203 10:16:04.127047 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_8dddc23a-4179-44b0-b145-a91ab3441703/nova-metadata-metadata/0.log" Dec 03 10:16:04 crc kubenswrapper[4576]: I1203 10:16:04.212615 4576 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovn-controller-metrics-lxvbj_ac988c47-bfaa-4142-a15b-6c69acd494df/openstack-network-exporter/0.log" Dec 03 10:16:04 crc kubenswrapper[4576]: I1203 10:16:04.411748 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-pjp95_310fba87-b39f-4613-a373-54ecd21ed629/ovsdb-server-init/0.log" Dec 03 10:16:04 crc kubenswrapper[4576]: I1203 10:16:04.609487 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-pjp95_310fba87-b39f-4613-a373-54ecd21ed629/ovsdb-server/0.log" Dec 03 10:16:04 crc kubenswrapper[4576]: I1203 10:16:04.650190 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-pjp95_310fba87-b39f-4613-a373-54ecd21ed629/ovsdb-server-init/0.log" Dec 03 10:16:04 crc kubenswrapper[4576]: I1203 10:16:04.708132 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-pjp95_310fba87-b39f-4613-a373-54ecd21ed629/ovs-vswitchd/0.log" Dec 03 10:16:04 crc kubenswrapper[4576]: I1203 10:16:04.969484 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_636d191f-b7e2-4200-8dc3-5b0f386e2499/openstack-network-exporter/0.log" Dec 03 10:16:04 crc kubenswrapper[4576]: I1203 10:16:04.995084 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-6n2r7_d7aa650b-ed26-494b-bc5f-95320ad9be67/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 10:16:05 crc kubenswrapper[4576]: I1203 10:16:05.031001 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_636d191f-b7e2-4200-8dc3-5b0f386e2499/ovn-northd/0.log" Dec 03 10:16:05 crc kubenswrapper[4576]: I1203 10:16:05.361548 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_64d45188-c499-4626-bdd3-6f54a0ed3f14/openstack-network-exporter/0.log" Dec 03 10:16:05 crc kubenswrapper[4576]: I1203 10:16:05.537907 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_64d45188-c499-4626-bdd3-6f54a0ed3f14/ovsdbserver-nb/0.log" Dec 03 10:16:05 crc kubenswrapper[4576]: I1203 10:16:05.652852 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_3a2bee80-63d4-41ae-97fc-54a96c4afc6e/ovsdbserver-sb/0.log" Dec 03 10:16:05 crc kubenswrapper[4576]: I1203 10:16:05.845017 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_3a2bee80-63d4-41ae-97fc-54a96c4afc6e/openstack-network-exporter/0.log" Dec 03 10:16:06 crc kubenswrapper[4576]: I1203 10:16:06.021903 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-5f666f544-xj7sz_a1aaa45e-8e67-4360-b4db-2d2866d00865/placement-api/0.log" Dec 03 10:16:06 crc kubenswrapper[4576]: I1203 10:16:06.171377 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-5f666f544-xj7sz_a1aaa45e-8e67-4360-b4db-2d2866d00865/placement-log/0.log" Dec 03 10:16:06 crc kubenswrapper[4576]: I1203 10:16:06.249193 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_d0c9f8c4-e8c5-4033-ac50-305178e9010f/setup-container/0.log" Dec 03 10:16:06 crc kubenswrapper[4576]: I1203 10:16:06.429605 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_d0c9f8c4-e8c5-4033-ac50-305178e9010f/setup-container/0.log" Dec 03 10:16:06 crc kubenswrapper[4576]: I1203 10:16:06.622447 4576 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_d0c9f8c4-e8c5-4033-ac50-305178e9010f/rabbitmq/0.log" Dec 03 10:16:06 crc kubenswrapper[4576]: I1203 10:16:06.643058 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9/setup-container/0.log" Dec 03 10:16:06 crc kubenswrapper[4576]: I1203 10:16:06.915480 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9/setup-container/0.log" Dec 03 10:16:06 crc kubenswrapper[4576]: I1203 10:16:06.932653 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_fcf046f8-36fd-4cd2-ab49-fbff8ff99fc9/rabbitmq/0.log" Dec 03 10:16:07 crc kubenswrapper[4576]: I1203 10:16:07.113801 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-2c4mf_96dfc3f2-bb7c-407d-8714-98a679e6d78e/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 10:16:07 crc kubenswrapper[4576]: I1203 10:16:07.244479 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-82clv_3fc3717e-aa96-497b-8d90-3c247a234d88/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 10:16:07 crc kubenswrapper[4576]: I1203 10:16:07.458161 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-86chp_fd4aa481-7064-4ebc-bc06-d706d427260d/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 10:16:07 crc kubenswrapper[4576]: I1203 10:16:07.603873 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-h2l89_9afda75e-55d1-4823-a4be-3c79bf36b3b2/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 10:16:07 crc kubenswrapper[4576]: I1203 10:16:07.777577 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-k8srx_2c30d1eb-aa4b-44e6-b424-dcdd12b23090/ssh-known-hosts-edpm-deployment/0.log" Dec 03 10:16:08 crc kubenswrapper[4576]: I1203 10:16:08.241601 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-b677c5dc5-pfc4n_6bf8d1cf-0003-4e48-89f5-7ae1698f27ff/proxy-server/0.log" Dec 03 10:16:08 crc kubenswrapper[4576]: I1203 10:16:08.389237 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-b677c5dc5-pfc4n_6bf8d1cf-0003-4e48-89f5-7ae1698f27ff/proxy-httpd/0.log" Dec 03 10:16:08 crc kubenswrapper[4576]: I1203 10:16:08.403971 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-649tl_801bf024-9418-42e1-893f-0a4b82d411b4/swift-ring-rebalance/0.log" Dec 03 10:16:08 crc kubenswrapper[4576]: I1203 10:16:08.541451 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8e0694dc-a49e-4136-a206-3bb5c8acd48c/account-auditor/0.log" Dec 03 10:16:08 crc kubenswrapper[4576]: I1203 10:16:08.646966 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8e0694dc-a49e-4136-a206-3bb5c8acd48c/account-reaper/0.log" Dec 03 10:16:08 crc kubenswrapper[4576]: I1203 10:16:08.800331 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8e0694dc-a49e-4136-a206-3bb5c8acd48c/account-replicator/0.log" Dec 03 10:16:08 crc kubenswrapper[4576]: I1203 10:16:08.833937 4576 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_8e0694dc-a49e-4136-a206-3bb5c8acd48c/container-auditor/0.log" Dec 03 10:16:08 crc kubenswrapper[4576]: I1203 10:16:08.879092 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8e0694dc-a49e-4136-a206-3bb5c8acd48c/account-server/0.log" Dec 03 10:16:08 crc kubenswrapper[4576]: I1203 10:16:08.943022 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8e0694dc-a49e-4136-a206-3bb5c8acd48c/container-replicator/0.log" Dec 03 10:16:09 crc kubenswrapper[4576]: I1203 10:16:09.051373 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8e0694dc-a49e-4136-a206-3bb5c8acd48c/container-server/0.log" Dec 03 10:16:09 crc kubenswrapper[4576]: I1203 10:16:09.194962 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8e0694dc-a49e-4136-a206-3bb5c8acd48c/object-auditor/0.log" Dec 03 10:16:09 crc kubenswrapper[4576]: I1203 10:16:09.285481 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8e0694dc-a49e-4136-a206-3bb5c8acd48c/object-expirer/0.log" Dec 03 10:16:09 crc kubenswrapper[4576]: I1203 10:16:09.334181 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8e0694dc-a49e-4136-a206-3bb5c8acd48c/container-updater/0.log" Dec 03 10:16:09 crc kubenswrapper[4576]: I1203 10:16:09.399253 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8e0694dc-a49e-4136-a206-3bb5c8acd48c/object-replicator/0.log" Dec 03 10:16:09 crc kubenswrapper[4576]: I1203 10:16:09.459222 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8e0694dc-a49e-4136-a206-3bb5c8acd48c/object-server/0.log" Dec 03 10:16:09 crc kubenswrapper[4576]: I1203 10:16:09.589439 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8e0694dc-a49e-4136-a206-3bb5c8acd48c/rsync/0.log" Dec 03 10:16:09 crc kubenswrapper[4576]: I1203 10:16:09.613308 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8e0694dc-a49e-4136-a206-3bb5c8acd48c/object-updater/0.log" Dec 03 10:16:09 crc kubenswrapper[4576]: I1203 10:16:09.698774 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8e0694dc-a49e-4136-a206-3bb5c8acd48c/swift-recon-cron/0.log" Dec 03 10:16:10 crc kubenswrapper[4576]: I1203 10:16:10.199327 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_34096dc4-8175-4637-916a-9e52376b8c08/tempest-tests-tempest-tests-runner/0.log" Dec 03 10:16:10 crc kubenswrapper[4576]: I1203 10:16:10.298578 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-p9xpm_f432497e-88f4-424f-beb0-856c58fb586d/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 10:16:10 crc kubenswrapper[4576]: I1203 10:16:10.488947 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_d9e81811-e29e-4a52-a3d6-3ed997b86415/test-operator-logs-container/0.log" Dec 03 10:16:10 crc kubenswrapper[4576]: I1203 10:16:10.641565 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-lbmmg_da77edd1-65a1-4f59-a4d3-e57679ae6acf/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 10:16:24 crc 
kubenswrapper[4576]: I1203 10:16:24.329014 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_c7c148cb-508f-45ed-a5ea-06b0b4bc51ff/memcached/0.log" Dec 03 10:16:43 crc kubenswrapper[4576]: I1203 10:16:43.208197 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt_e86979bd-954c-45c5-940d-f8e334277a44/util/0.log" Dec 03 10:16:43 crc kubenswrapper[4576]: I1203 10:16:43.498298 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt_e86979bd-954c-45c5-940d-f8e334277a44/pull/0.log" Dec 03 10:16:43 crc kubenswrapper[4576]: I1203 10:16:43.532015 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt_e86979bd-954c-45c5-940d-f8e334277a44/pull/0.log" Dec 03 10:16:43 crc kubenswrapper[4576]: I1203 10:16:43.563847 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt_e86979bd-954c-45c5-940d-f8e334277a44/util/0.log" Dec 03 10:16:43 crc kubenswrapper[4576]: I1203 10:16:43.742354 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt_e86979bd-954c-45c5-940d-f8e334277a44/extract/0.log" Dec 03 10:16:43 crc kubenswrapper[4576]: I1203 10:16:43.816061 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt_e86979bd-954c-45c5-940d-f8e334277a44/util/0.log" Dec 03 10:16:43 crc kubenswrapper[4576]: I1203 10:16:43.887999 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a962dhdt_e86979bd-954c-45c5-940d-f8e334277a44/pull/0.log" Dec 03 10:16:44 crc kubenswrapper[4576]: I1203 10:16:44.037991 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-4mznn_3ea8e44c-f5b3-4b92-94ed-04954472481c/kube-rbac-proxy/0.log" Dec 03 10:16:44 crc kubenswrapper[4576]: I1203 10:16:44.153410 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-4mznn_3ea8e44c-f5b3-4b92-94ed-04954472481c/manager/0.log" Dec 03 10:16:44 crc kubenswrapper[4576]: I1203 10:16:44.192601 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-wqx97_cce29053-f3ed-4dce-a362-c99b4aa31102/kube-rbac-proxy/0.log" Dec 03 10:16:44 crc kubenswrapper[4576]: I1203 10:16:44.328985 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-wqx97_cce29053-f3ed-4dce-a362-c99b4aa31102/manager/0.log" Dec 03 10:16:44 crc kubenswrapper[4576]: I1203 10:16:44.520977 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-n78hw_75840987-c6e3-45e6-912c-85771c498e41/kube-rbac-proxy/0.log" Dec 03 10:16:44 crc kubenswrapper[4576]: I1203 10:16:44.569899 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-n78hw_75840987-c6e3-45e6-912c-85771c498e41/manager/0.log" Dec 03 
10:16:44 crc kubenswrapper[4576]: I1203 10:16:44.650359 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-zghdb_df68290e-5853-4fce-903f-354ea9f740e1/kube-rbac-proxy/0.log" Dec 03 10:16:44 crc kubenswrapper[4576]: I1203 10:16:44.817620 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-zghdb_df68290e-5853-4fce-903f-354ea9f740e1/manager/0.log" Dec 03 10:16:44 crc kubenswrapper[4576]: I1203 10:16:44.862958 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-nsggq_649283f6-ebcd-45a0-974f-e9c14138fa46/kube-rbac-proxy/0.log" Dec 03 10:16:45 crc kubenswrapper[4576]: I1203 10:16:45.027020 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-nsggq_649283f6-ebcd-45a0-974f-e9c14138fa46/manager/0.log" Dec 03 10:16:45 crc kubenswrapper[4576]: I1203 10:16:45.061211 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-8g876_41505f0c-de81-41e5-b9e1-de8a17563b8d/kube-rbac-proxy/0.log" Dec 03 10:16:45 crc kubenswrapper[4576]: I1203 10:16:45.088766 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-8g876_41505f0c-de81-41e5-b9e1-de8a17563b8d/manager/0.log" Dec 03 10:16:45 crc kubenswrapper[4576]: I1203 10:16:45.262855 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-4tr5z_f923e423-dcca-499c-8bf1-1c5d4288f20c/kube-rbac-proxy/0.log" Dec 03 10:16:45 crc kubenswrapper[4576]: I1203 10:16:45.485927 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-4tr5z_f923e423-dcca-499c-8bf1-1c5d4288f20c/manager/0.log" Dec 03 10:16:45 crc kubenswrapper[4576]: I1203 10:16:45.553566 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-49hnv_ac2346b5-8522-40bf-8083-15d06d8b9afd/kube-rbac-proxy/0.log" Dec 03 10:16:45 crc kubenswrapper[4576]: I1203 10:16:45.653315 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-49hnv_ac2346b5-8522-40bf-8083-15d06d8b9afd/manager/0.log" Dec 03 10:16:45 crc kubenswrapper[4576]: I1203 10:16:45.933660 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-64r8s_13d78877-8170-498d-bf0c-ab37fb799c83/kube-rbac-proxy/0.log" Dec 03 10:16:46 crc kubenswrapper[4576]: I1203 10:16:46.131976 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-64r8s_13d78877-8170-498d-bf0c-ab37fb799c83/manager/0.log" Dec 03 10:16:46 crc kubenswrapper[4576]: I1203 10:16:46.249675 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-w68kf_7c74d79c-0100-40b9-a363-434b817b0504/manager/0.log" Dec 03 10:16:46 crc kubenswrapper[4576]: I1203 10:16:46.294796 4576 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-w68kf_7c74d79c-0100-40b9-a363-434b817b0504/kube-rbac-proxy/0.log" Dec 03 10:16:46 crc kubenswrapper[4576]: I1203 10:16:46.439060 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-42rgl_a3246ab9-76a7-41dd-9fcd-57323766f4f2/kube-rbac-proxy/0.log" Dec 03 10:16:46 crc kubenswrapper[4576]: I1203 10:16:46.536283 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-42rgl_a3246ab9-76a7-41dd-9fcd-57323766f4f2/manager/0.log" Dec 03 10:16:46 crc kubenswrapper[4576]: I1203 10:16:46.838728 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-jwqvx_7e7e8ece-f6be-41dc-be20-b82b844b4b83/kube-rbac-proxy/0.log" Dec 03 10:16:46 crc kubenswrapper[4576]: I1203 10:16:46.876432 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-jwqvx_7e7e8ece-f6be-41dc-be20-b82b844b4b83/manager/0.log" Dec 03 10:16:46 crc kubenswrapper[4576]: I1203 10:16:46.918430 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-7mg4g_e697b8bb-b78f-4b0c-92e6-adde533c75b6/kube-rbac-proxy/0.log" Dec 03 10:16:47 crc kubenswrapper[4576]: I1203 10:16:47.178213 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-7mg4g_e697b8bb-b78f-4b0c-92e6-adde533c75b6/manager/0.log" Dec 03 10:16:47 crc kubenswrapper[4576]: I1203 10:16:47.201082 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-rflv4_5280c7ee-cf95-4f36-a074-247880784343/kube-rbac-proxy/0.log" Dec 03 10:16:47 crc kubenswrapper[4576]: I1203 10:16:47.264646 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-rflv4_5280c7ee-cf95-4f36-a074-247880784343/manager/0.log" Dec 03 10:16:47 crc kubenswrapper[4576]: I1203 10:16:47.445863 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd42tjbm_bad742c6-6ff9-4fe9-8a09-7d399b6d41de/kube-rbac-proxy/0.log" Dec 03 10:16:47 crc kubenswrapper[4576]: I1203 10:16:47.494224 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd42tjbm_bad742c6-6ff9-4fe9-8a09-7d399b6d41de/manager/0.log" Dec 03 10:16:47 crc kubenswrapper[4576]: I1203 10:16:47.969004 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-6757ffd54f-4wfmb_b30928c7-4b2d-4fec-81f3-9237336e8d81/operator/0.log" Dec 03 10:16:48 crc kubenswrapper[4576]: I1203 10:16:48.144872 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-rwqb7_335bbabf-ec3b-484b-8081-08e919ec8dcd/registry-server/0.log" Dec 03 10:16:48 crc kubenswrapper[4576]: I1203 10:16:48.298001 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-g9t26_c4445b62-9884-4667-96cd-ce531cc798c4/kube-rbac-proxy/0.log" Dec 03 10:16:48 crc kubenswrapper[4576]: I1203 10:16:48.390137 4576 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-g9t26_c4445b62-9884-4667-96cd-ce531cc798c4/manager/0.log" Dec 03 10:16:48 crc kubenswrapper[4576]: I1203 10:16:48.600153 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-tdbjg_8c321c82-4420-4b97-a16b-ce20c7ebcb15/manager/0.log" Dec 03 10:16:48 crc kubenswrapper[4576]: I1203 10:16:48.635257 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-665947b8b5-hr855_446e8b7b-0e54-4b76-b0c7-56ec1f779499/manager/0.log" Dec 03 10:16:48 crc kubenswrapper[4576]: I1203 10:16:48.668629 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-tdbjg_8c321c82-4420-4b97-a16b-ce20c7ebcb15/kube-rbac-proxy/0.log" Dec 03 10:16:48 crc kubenswrapper[4576]: I1203 10:16:48.940267 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-6mlkp_f6e0d66d-6dc0-461c-a5c5-8a1060b6b164/manager/0.log" Dec 03 10:16:48 crc kubenswrapper[4576]: I1203 10:16:48.979303 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-qv7t2_e72f6251-8004-43cc-9bf2-80bc4b8d4431/operator/0.log" Dec 03 10:16:49 crc kubenswrapper[4576]: I1203 10:16:49.020320 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-6mlkp_f6e0d66d-6dc0-461c-a5c5-8a1060b6b164/kube-rbac-proxy/0.log" Dec 03 10:16:49 crc kubenswrapper[4576]: I1203 10:16:49.235618 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-vb4nv_7d654424-85f1-4848-93f3-abb64297ce3b/manager/0.log" Dec 03 10:16:49 crc kubenswrapper[4576]: I1203 10:16:49.287389 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-vb4nv_7d654424-85f1-4848-93f3-abb64297ce3b/kube-rbac-proxy/0.log" Dec 03 10:16:49 crc kubenswrapper[4576]: I1203 10:16:49.359831 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-ndz7z_3ce77fe1-0135-4043-9ebd-b7722db624d9/kube-rbac-proxy/0.log" Dec 03 10:16:49 crc kubenswrapper[4576]: I1203 10:16:49.425264 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-ndz7z_3ce77fe1-0135-4043-9ebd-b7722db624d9/manager/0.log" Dec 03 10:16:49 crc kubenswrapper[4576]: I1203 10:16:49.514189 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-4c8wv_8a7accfb-c3a2-4f70-906e-b2a3545eb88a/kube-rbac-proxy/0.log" Dec 03 10:16:49 crc kubenswrapper[4576]: I1203 10:16:49.609159 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-4c8wv_8a7accfb-c3a2-4f70-906e-b2a3545eb88a/manager/0.log" Dec 03 10:16:58 crc kubenswrapper[4576]: I1203 10:16:58.651719 4576 scope.go:117] "RemoveContainer" containerID="d3022c281a1aad7078b54ff5836d050ff6bb38aa868c2dd31192bf04b23458d1" Dec 03 10:16:58 crc kubenswrapper[4576]: I1203 10:16:58.679198 4576 scope.go:117] "RemoveContainer" 
containerID="87a6f47ea3f78efc15561b35b96dc8395b1ea1f5fbe254ac50a59ba4779c730b" Dec 03 10:16:58 crc kubenswrapper[4576]: I1203 10:16:58.703685 4576 scope.go:117] "RemoveContainer" containerID="2549e413fcead7089a10d9dce9b58d32a5497afbb2146791b952b917bd28ddc0" Dec 03 10:17:09 crc kubenswrapper[4576]: I1203 10:17:09.680967 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 10:17:09 crc kubenswrapper[4576]: I1203 10:17:09.681686 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 10:17:13 crc kubenswrapper[4576]: I1203 10:17:13.310378 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-kbnz6_19be1a82-6279-466c-a719-e346d59597be/control-plane-machine-set-operator/0.log" Dec 03 10:17:13 crc kubenswrapper[4576]: I1203 10:17:13.558438 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-nsf2s_bedc7336-f16c-485b-8cc8-13eea705d68a/machine-api-operator/0.log" Dec 03 10:17:13 crc kubenswrapper[4576]: I1203 10:17:13.588278 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-nsf2s_bedc7336-f16c-485b-8cc8-13eea705d68a/kube-rbac-proxy/0.log" Dec 03 10:17:28 crc kubenswrapper[4576]: I1203 10:17:28.582191 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-ms2nw_5d5e47f6-494f-4fc7-a4c0-c12410e86da3/cert-manager-controller/0.log" Dec 03 10:17:28 crc kubenswrapper[4576]: I1203 10:17:28.843238 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-l45pw_59825354-6654-4c6a-be27-4d3b6f2a57c2/cert-manager-cainjector/0.log" Dec 03 10:17:28 crc kubenswrapper[4576]: I1203 10:17:28.920673 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-fjnrl_a1cefe8c-df93-4ed2-a334-c60ce9cc918d/cert-manager-webhook/0.log" Dec 03 10:17:39 crc kubenswrapper[4576]: I1203 10:17:39.680949 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 10:17:39 crc kubenswrapper[4576]: I1203 10:17:39.681570 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 10:17:47 crc kubenswrapper[4576]: I1203 10:17:47.907042 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-rfj5d_cb5b59ce-56be-4b48-902a-902dc9e7a707/nmstate-console-plugin/0.log" Dec 03 10:17:48 crc kubenswrapper[4576]: 
I1203 10:17:48.179846 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-ll4ls_263f185a-e858-45d0-a61c-453056d0a98c/nmstate-handler/0.log" Dec 03 10:17:48 crc kubenswrapper[4576]: I1203 10:17:48.247589 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-h2g5z_4811e0da-b69c-436c-a2f9-1796a35b69ee/kube-rbac-proxy/0.log" Dec 03 10:17:48 crc kubenswrapper[4576]: I1203 10:17:48.325932 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-h2g5z_4811e0da-b69c-436c-a2f9-1796a35b69ee/nmstate-metrics/0.log" Dec 03 10:17:48 crc kubenswrapper[4576]: I1203 10:17:48.485103 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-8ttll_fee161aa-5f26-41be-ba50-4b06079f597e/nmstate-operator/0.log" Dec 03 10:17:48 crc kubenswrapper[4576]: I1203 10:17:48.556158 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-889j7_208539d3-2c0f-4889-9239-c3dddd20ad3b/nmstate-webhook/0.log" Dec 03 10:17:58 crc kubenswrapper[4576]: I1203 10:17:58.805610 4576 scope.go:117] "RemoveContainer" containerID="5f352308c5791c4e5d32a91e72321be85c2d7ab232d96fb70f78f4f88659ef1d" Dec 03 10:17:58 crc kubenswrapper[4576]: I1203 10:17:58.832671 4576 scope.go:117] "RemoveContainer" containerID="89ed6475ce6defe712136e1db2c5efbd70af996bd48b7deedeb2081e6ed4ad37" Dec 03 10:17:58 crc kubenswrapper[4576]: I1203 10:17:58.858928 4576 scope.go:117] "RemoveContainer" containerID="bd6adf083773045372a62d0f05ceac9424285264fd100949507e0310631564ce" Dec 03 10:18:06 crc kubenswrapper[4576]: I1203 10:18:06.096346 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-9fpwm_35fce6e6-ceb3-4844-ad2e-fb7454c2e425/kube-rbac-proxy/0.log" Dec 03 10:18:06 crc kubenswrapper[4576]: I1203 10:18:06.119371 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-9fpwm_35fce6e6-ceb3-4844-ad2e-fb7454c2e425/controller/0.log" Dec 03 10:18:06 crc kubenswrapper[4576]: I1203 10:18:06.284583 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jhd6k_f893c93f-566c-4c25-8e2b-48c3d73ca5fd/cp-frr-files/0.log" Dec 03 10:18:06 crc kubenswrapper[4576]: I1203 10:18:06.458575 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jhd6k_f893c93f-566c-4c25-8e2b-48c3d73ca5fd/cp-reloader/0.log" Dec 03 10:18:06 crc kubenswrapper[4576]: I1203 10:18:06.482279 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jhd6k_f893c93f-566c-4c25-8e2b-48c3d73ca5fd/cp-frr-files/0.log" Dec 03 10:18:06 crc kubenswrapper[4576]: I1203 10:18:06.517090 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jhd6k_f893c93f-566c-4c25-8e2b-48c3d73ca5fd/cp-metrics/0.log" Dec 03 10:18:06 crc kubenswrapper[4576]: I1203 10:18:06.545237 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jhd6k_f893c93f-566c-4c25-8e2b-48c3d73ca5fd/cp-reloader/0.log" Dec 03 10:18:06 crc kubenswrapper[4576]: I1203 10:18:06.752225 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jhd6k_f893c93f-566c-4c25-8e2b-48c3d73ca5fd/cp-metrics/0.log" Dec 03 10:18:06 crc kubenswrapper[4576]: I1203 10:18:06.785607 4576 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-jhd6k_f893c93f-566c-4c25-8e2b-48c3d73ca5fd/cp-frr-files/0.log" Dec 03 10:18:06 crc kubenswrapper[4576]: I1203 10:18:06.797190 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jhd6k_f893c93f-566c-4c25-8e2b-48c3d73ca5fd/cp-reloader/0.log" Dec 03 10:18:06 crc kubenswrapper[4576]: I1203 10:18:06.802667 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jhd6k_f893c93f-566c-4c25-8e2b-48c3d73ca5fd/cp-metrics/0.log" Dec 03 10:18:06 crc kubenswrapper[4576]: I1203 10:18:06.966597 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jhd6k_f893c93f-566c-4c25-8e2b-48c3d73ca5fd/cp-reloader/0.log" Dec 03 10:18:06 crc kubenswrapper[4576]: I1203 10:18:06.974302 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jhd6k_f893c93f-566c-4c25-8e2b-48c3d73ca5fd/cp-frr-files/0.log" Dec 03 10:18:06 crc kubenswrapper[4576]: I1203 10:18:06.988225 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jhd6k_f893c93f-566c-4c25-8e2b-48c3d73ca5fd/cp-metrics/0.log" Dec 03 10:18:07 crc kubenswrapper[4576]: I1203 10:18:07.066399 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jhd6k_f893c93f-566c-4c25-8e2b-48c3d73ca5fd/controller/0.log" Dec 03 10:18:07 crc kubenswrapper[4576]: I1203 10:18:07.172863 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jhd6k_f893c93f-566c-4c25-8e2b-48c3d73ca5fd/frr-metrics/0.log" Dec 03 10:18:07 crc kubenswrapper[4576]: I1203 10:18:07.276182 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jhd6k_f893c93f-566c-4c25-8e2b-48c3d73ca5fd/kube-rbac-proxy/0.log" Dec 03 10:18:07 crc kubenswrapper[4576]: I1203 10:18:07.346502 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jhd6k_f893c93f-566c-4c25-8e2b-48c3d73ca5fd/kube-rbac-proxy-frr/0.log" Dec 03 10:18:07 crc kubenswrapper[4576]: I1203 10:18:07.499814 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jhd6k_f893c93f-566c-4c25-8e2b-48c3d73ca5fd/reloader/0.log" Dec 03 10:18:07 crc kubenswrapper[4576]: I1203 10:18:07.671294 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-rjsk6_ea79aacc-f31f-43cd-a0a4-151d9a4703e4/frr-k8s-webhook-server/0.log" Dec 03 10:18:07 crc kubenswrapper[4576]: I1203 10:18:07.796867 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-6b55b5ccff-rm7kb_4641f310-7d84-4f47-8250-5551fc71ae77/manager/0.log" Dec 03 10:18:08 crc kubenswrapper[4576]: I1203 10:18:08.041816 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-7fdfc49798-njl4b_8f7f1ce8-dc0b-4508-a9aa-2527f55973ff/webhook-server/0.log" Dec 03 10:18:08 crc kubenswrapper[4576]: I1203 10:18:08.313740 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-8244f_7dc75631-a77b-4157-9abb-ba8ea06fb5dd/kube-rbac-proxy/0.log" Dec 03 10:18:08 crc kubenswrapper[4576]: I1203 10:18:08.633704 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jhd6k_f893c93f-566c-4c25-8e2b-48c3d73ca5fd/frr/0.log" Dec 03 10:18:08 crc kubenswrapper[4576]: I1203 10:18:08.832431 4576 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_speaker-8244f_7dc75631-a77b-4157-9abb-ba8ea06fb5dd/speaker/0.log" Dec 03 10:18:09 crc kubenswrapper[4576]: I1203 10:18:09.680394 4576 patch_prober.go:28] interesting pod/machine-config-daemon-pjb2d container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 10:18:09 crc kubenswrapper[4576]: I1203 10:18:09.680444 4576 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 10:18:09 crc kubenswrapper[4576]: I1203 10:18:09.687807 4576 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" Dec 03 10:18:09 crc kubenswrapper[4576]: I1203 10:18:09.688632 4576 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7702a114a87e2a5a796edf3c4a450662bfd88a4ff9af32af69c6458ff530891a"} pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 10:18:09 crc kubenswrapper[4576]: I1203 10:18:09.688698 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" containerName="machine-config-daemon" containerID="cri-o://7702a114a87e2a5a796edf3c4a450662bfd88a4ff9af32af69c6458ff530891a" gracePeriod=600 Dec 03 10:18:10 crc kubenswrapper[4576]: I1203 10:18:10.489202 4576 generic.go:334] "Generic (PLEG): container finished" podID="60b1bede-26e9-4b5d-b450-9866da685693" containerID="7702a114a87e2a5a796edf3c4a450662bfd88a4ff9af32af69c6458ff530891a" exitCode=0 Dec 03 10:18:10 crc kubenswrapper[4576]: I1203 10:18:10.489243 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" event={"ID":"60b1bede-26e9-4b5d-b450-9866da685693","Type":"ContainerDied","Data":"7702a114a87e2a5a796edf3c4a450662bfd88a4ff9af32af69c6458ff530891a"} Dec 03 10:18:10 crc kubenswrapper[4576]: I1203 10:18:10.489275 4576 scope.go:117] "RemoveContainer" containerID="3cf7e7748a7af8f7cf693de2b91428d6d2ee2b9f329c964a3b8f5c09e79a6a3c" Dec 03 10:18:10 crc kubenswrapper[4576]: E1203 10:18:10.744982 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:18:11 crc kubenswrapper[4576]: I1203 10:18:11.499377 4576 scope.go:117] "RemoveContainer" containerID="7702a114a87e2a5a796edf3c4a450662bfd88a4ff9af32af69c6458ff530891a" Dec 03 10:18:11 crc kubenswrapper[4576]: E1203 10:18:11.500163 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s 
restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:18:21 crc kubenswrapper[4576]: I1203 10:18:21.859719 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7_afe8f623-5aa0-4830-8144-e8f207db1316/util/0.log" Dec 03 10:18:22 crc kubenswrapper[4576]: I1203 10:18:22.102383 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7_afe8f623-5aa0-4830-8144-e8f207db1316/pull/0.log" Dec 03 10:18:22 crc kubenswrapper[4576]: I1203 10:18:22.112153 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7_afe8f623-5aa0-4830-8144-e8f207db1316/pull/0.log" Dec 03 10:18:22 crc kubenswrapper[4576]: I1203 10:18:22.123066 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7_afe8f623-5aa0-4830-8144-e8f207db1316/util/0.log" Dec 03 10:18:22 crc kubenswrapper[4576]: I1203 10:18:22.372633 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7_afe8f623-5aa0-4830-8144-e8f207db1316/util/0.log" Dec 03 10:18:22 crc kubenswrapper[4576]: I1203 10:18:22.405429 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7_afe8f623-5aa0-4830-8144-e8f207db1316/pull/0.log" Dec 03 10:18:22 crc kubenswrapper[4576]: I1203 10:18:22.415781 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fw4gv7_afe8f623-5aa0-4830-8144-e8f207db1316/extract/0.log" Dec 03 10:18:22 crc kubenswrapper[4576]: I1203 10:18:22.555046 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc_059fa16e-ef61-475d-927a-8b6fe7ed5c81/util/0.log" Dec 03 10:18:22 crc kubenswrapper[4576]: I1203 10:18:22.775369 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc_059fa16e-ef61-475d-927a-8b6fe7ed5c81/util/0.log" Dec 03 10:18:22 crc kubenswrapper[4576]: I1203 10:18:22.820052 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc_059fa16e-ef61-475d-927a-8b6fe7ed5c81/pull/0.log" Dec 03 10:18:22 crc kubenswrapper[4576]: I1203 10:18:22.823209 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc_059fa16e-ef61-475d-927a-8b6fe7ed5c81/pull/0.log" Dec 03 10:18:22 crc kubenswrapper[4576]: I1203 10:18:22.962433 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc_059fa16e-ef61-475d-927a-8b6fe7ed5c81/util/0.log" Dec 03 10:18:22 crc kubenswrapper[4576]: I1203 10:18:22.983480 4576 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc_059fa16e-ef61-475d-927a-8b6fe7ed5c81/pull/0.log" Dec 03 10:18:23 crc kubenswrapper[4576]: I1203 10:18:23.046372 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83knfwc_059fa16e-ef61-475d-927a-8b6fe7ed5c81/extract/0.log" Dec 03 10:18:23 crc kubenswrapper[4576]: I1203 10:18:23.175461 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nl2dd_2fde46f0-4823-4c59-bd4d-31b63c794d13/extract-utilities/0.log" Dec 03 10:18:23 crc kubenswrapper[4576]: I1203 10:18:23.435901 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nl2dd_2fde46f0-4823-4c59-bd4d-31b63c794d13/extract-utilities/0.log" Dec 03 10:18:23 crc kubenswrapper[4576]: I1203 10:18:23.455458 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nl2dd_2fde46f0-4823-4c59-bd4d-31b63c794d13/extract-content/0.log" Dec 03 10:18:23 crc kubenswrapper[4576]: I1203 10:18:23.481183 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nl2dd_2fde46f0-4823-4c59-bd4d-31b63c794d13/extract-content/0.log" Dec 03 10:18:23 crc kubenswrapper[4576]: I1203 10:18:23.658257 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nl2dd_2fde46f0-4823-4c59-bd4d-31b63c794d13/extract-content/0.log" Dec 03 10:18:23 crc kubenswrapper[4576]: I1203 10:18:23.662835 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nl2dd_2fde46f0-4823-4c59-bd4d-31b63c794d13/extract-utilities/0.log" Dec 03 10:18:23 crc kubenswrapper[4576]: I1203 10:18:23.935419 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wgbb8_576695bd-064a-4fc2-8aa2-ba863892d1bb/extract-utilities/0.log" Dec 03 10:18:23 crc kubenswrapper[4576]: I1203 10:18:23.964697 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nl2dd_2fde46f0-4823-4c59-bd4d-31b63c794d13/registry-server/0.log" Dec 03 10:18:24 crc kubenswrapper[4576]: I1203 10:18:24.172039 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wgbb8_576695bd-064a-4fc2-8aa2-ba863892d1bb/extract-utilities/0.log" Dec 03 10:18:24 crc kubenswrapper[4576]: I1203 10:18:24.178258 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wgbb8_576695bd-064a-4fc2-8aa2-ba863892d1bb/extract-content/0.log" Dec 03 10:18:24 crc kubenswrapper[4576]: I1203 10:18:24.209316 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wgbb8_576695bd-064a-4fc2-8aa2-ba863892d1bb/extract-content/0.log" Dec 03 10:18:24 crc kubenswrapper[4576]: I1203 10:18:24.392685 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wgbb8_576695bd-064a-4fc2-8aa2-ba863892d1bb/extract-content/0.log" Dec 03 10:18:24 crc kubenswrapper[4576]: I1203 10:18:24.412044 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wgbb8_576695bd-064a-4fc2-8aa2-ba863892d1bb/extract-utilities/0.log" Dec 03 10:18:24 crc kubenswrapper[4576]: I1203 10:18:24.498883 4576 
prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/neutron-6f978d8b99-7d88p" podUID="cdfca795-30e9-4534-9084-e34e01ab71ae" containerName="neutron-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 502" Dec 03 10:18:24 crc kubenswrapper[4576]: I1203 10:18:24.716274 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-j26qz_c981f304-77aa-443d-8af7-3d665a32e754/marketplace-operator/0.log" Dec 03 10:18:24 crc kubenswrapper[4576]: I1203 10:18:24.859370 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-pv99n_3fb60644-2f82-4e25-8121-0a9898ec0aa9/extract-utilities/0.log" Dec 03 10:18:25 crc kubenswrapper[4576]: I1203 10:18:25.097579 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-pv99n_3fb60644-2f82-4e25-8121-0a9898ec0aa9/extract-content/0.log" Dec 03 10:18:25 crc kubenswrapper[4576]: I1203 10:18:25.167512 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-pv99n_3fb60644-2f82-4e25-8121-0a9898ec0aa9/extract-utilities/0.log" Dec 03 10:18:25 crc kubenswrapper[4576]: I1203 10:18:25.190643 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wgbb8_576695bd-064a-4fc2-8aa2-ba863892d1bb/registry-server/0.log" Dec 03 10:18:25 crc kubenswrapper[4576]: I1203 10:18:25.204166 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-pv99n_3fb60644-2f82-4e25-8121-0a9898ec0aa9/extract-content/0.log" Dec 03 10:18:25 crc kubenswrapper[4576]: I1203 10:18:25.389570 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-pv99n_3fb60644-2f82-4e25-8121-0a9898ec0aa9/extract-utilities/0.log" Dec 03 10:18:25 crc kubenswrapper[4576]: I1203 10:18:25.425222 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-pv99n_3fb60644-2f82-4e25-8121-0a9898ec0aa9/extract-content/0.log" Dec 03 10:18:25 crc kubenswrapper[4576]: I1203 10:18:25.596188 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-pv99n_3fb60644-2f82-4e25-8121-0a9898ec0aa9/registry-server/0.log" Dec 03 10:18:25 crc kubenswrapper[4576]: I1203 10:18:25.667747 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-b78nr_15f066ad-3c0d-409b-9c47-e9a36cf6660d/extract-utilities/0.log" Dec 03 10:18:25 crc kubenswrapper[4576]: I1203 10:18:25.859322 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-b78nr_15f066ad-3c0d-409b-9c47-e9a36cf6660d/extract-content/0.log" Dec 03 10:18:25 crc kubenswrapper[4576]: I1203 10:18:25.869921 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-b78nr_15f066ad-3c0d-409b-9c47-e9a36cf6660d/extract-utilities/0.log" Dec 03 10:18:25 crc kubenswrapper[4576]: I1203 10:18:25.916759 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-b78nr_15f066ad-3c0d-409b-9c47-e9a36cf6660d/extract-content/0.log" Dec 03 10:18:26 crc kubenswrapper[4576]: I1203 10:18:26.041224 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-b78nr_15f066ad-3c0d-409b-9c47-e9a36cf6660d/extract-content/0.log" Dec 03 10:18:26 crc 
kubenswrapper[4576]: I1203 10:18:26.096254 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-b78nr_15f066ad-3c0d-409b-9c47-e9a36cf6660d/extract-utilities/0.log" Dec 03 10:18:26 crc kubenswrapper[4576]: I1203 10:18:26.676826 4576 scope.go:117] "RemoveContainer" containerID="7702a114a87e2a5a796edf3c4a450662bfd88a4ff9af32af69c6458ff530891a" Dec 03 10:18:26 crc kubenswrapper[4576]: E1203 10:18:26.677064 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:18:26 crc kubenswrapper[4576]: I1203 10:18:26.701902 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-b78nr_15f066ad-3c0d-409b-9c47-e9a36cf6660d/registry-server/0.log" Dec 03 10:18:37 crc kubenswrapper[4576]: I1203 10:18:37.679308 4576 scope.go:117] "RemoveContainer" containerID="7702a114a87e2a5a796edf3c4a450662bfd88a4ff9af32af69c6458ff530891a" Dec 03 10:18:37 crc kubenswrapper[4576]: E1203 10:18:37.680159 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:18:39 crc kubenswrapper[4576]: I1203 10:18:39.850601 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-htzhb"] Dec 03 10:18:39 crc kubenswrapper[4576]: E1203 10:18:39.852913 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3b7c679-bb03-44e7-b3bb-583603f506f5" containerName="container-00" Dec 03 10:18:39 crc kubenswrapper[4576]: I1203 10:18:39.853054 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3b7c679-bb03-44e7-b3bb-583603f506f5" containerName="container-00" Dec 03 10:18:39 crc kubenswrapper[4576]: I1203 10:18:39.853473 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3b7c679-bb03-44e7-b3bb-583603f506f5" containerName="container-00" Dec 03 10:18:39 crc kubenswrapper[4576]: I1203 10:18:39.855980 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-htzhb" Dec 03 10:18:39 crc kubenswrapper[4576]: I1203 10:18:39.885463 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-htzhb"] Dec 03 10:18:39 crc kubenswrapper[4576]: I1203 10:18:39.994645 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qh757\" (UniqueName: \"kubernetes.io/projected/f7c5fcc8-860d-42f8-be07-35176d2efb3f-kube-api-access-qh757\") pod \"redhat-marketplace-htzhb\" (UID: \"f7c5fcc8-860d-42f8-be07-35176d2efb3f\") " pod="openshift-marketplace/redhat-marketplace-htzhb" Dec 03 10:18:39 crc kubenswrapper[4576]: I1203 10:18:39.994711 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f7c5fcc8-860d-42f8-be07-35176d2efb3f-utilities\") pod \"redhat-marketplace-htzhb\" (UID: \"f7c5fcc8-860d-42f8-be07-35176d2efb3f\") " pod="openshift-marketplace/redhat-marketplace-htzhb" Dec 03 10:18:39 crc kubenswrapper[4576]: I1203 10:18:39.994893 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f7c5fcc8-860d-42f8-be07-35176d2efb3f-catalog-content\") pod \"redhat-marketplace-htzhb\" (UID: \"f7c5fcc8-860d-42f8-be07-35176d2efb3f\") " pod="openshift-marketplace/redhat-marketplace-htzhb" Dec 03 10:18:40 crc kubenswrapper[4576]: I1203 10:18:40.096048 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qh757\" (UniqueName: \"kubernetes.io/projected/f7c5fcc8-860d-42f8-be07-35176d2efb3f-kube-api-access-qh757\") pod \"redhat-marketplace-htzhb\" (UID: \"f7c5fcc8-860d-42f8-be07-35176d2efb3f\") " pod="openshift-marketplace/redhat-marketplace-htzhb" Dec 03 10:18:40 crc kubenswrapper[4576]: I1203 10:18:40.096379 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f7c5fcc8-860d-42f8-be07-35176d2efb3f-utilities\") pod \"redhat-marketplace-htzhb\" (UID: \"f7c5fcc8-860d-42f8-be07-35176d2efb3f\") " pod="openshift-marketplace/redhat-marketplace-htzhb" Dec 03 10:18:40 crc kubenswrapper[4576]: I1203 10:18:40.096495 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f7c5fcc8-860d-42f8-be07-35176d2efb3f-catalog-content\") pod \"redhat-marketplace-htzhb\" (UID: \"f7c5fcc8-860d-42f8-be07-35176d2efb3f\") " pod="openshift-marketplace/redhat-marketplace-htzhb" Dec 03 10:18:40 crc kubenswrapper[4576]: I1203 10:18:40.096827 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f7c5fcc8-860d-42f8-be07-35176d2efb3f-utilities\") pod \"redhat-marketplace-htzhb\" (UID: \"f7c5fcc8-860d-42f8-be07-35176d2efb3f\") " pod="openshift-marketplace/redhat-marketplace-htzhb" Dec 03 10:18:40 crc kubenswrapper[4576]: I1203 10:18:40.096892 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f7c5fcc8-860d-42f8-be07-35176d2efb3f-catalog-content\") pod \"redhat-marketplace-htzhb\" (UID: \"f7c5fcc8-860d-42f8-be07-35176d2efb3f\") " pod="openshift-marketplace/redhat-marketplace-htzhb" Dec 03 10:18:40 crc kubenswrapper[4576]: I1203 10:18:40.117549 4576 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-qh757\" (UniqueName: \"kubernetes.io/projected/f7c5fcc8-860d-42f8-be07-35176d2efb3f-kube-api-access-qh757\") pod \"redhat-marketplace-htzhb\" (UID: \"f7c5fcc8-860d-42f8-be07-35176d2efb3f\") " pod="openshift-marketplace/redhat-marketplace-htzhb" Dec 03 10:18:40 crc kubenswrapper[4576]: I1203 10:18:40.189916 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-htzhb" Dec 03 10:18:40 crc kubenswrapper[4576]: I1203 10:18:40.681609 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-htzhb"] Dec 03 10:18:40 crc kubenswrapper[4576]: I1203 10:18:40.796282 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-htzhb" event={"ID":"f7c5fcc8-860d-42f8-be07-35176d2efb3f","Type":"ContainerStarted","Data":"ac7fabe18d6df0bc0a8cf4bcfe7dcc61d0029f6ea4b6ab3e8ad9c905c954636f"} Dec 03 10:18:41 crc kubenswrapper[4576]: I1203 10:18:41.807194 4576 generic.go:334] "Generic (PLEG): container finished" podID="f7c5fcc8-860d-42f8-be07-35176d2efb3f" containerID="00d3954d578416e3523622850463bea7a748f4d32dc642587a2fc70506b5a6f0" exitCode=0 Dec 03 10:18:41 crc kubenswrapper[4576]: I1203 10:18:41.807483 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-htzhb" event={"ID":"f7c5fcc8-860d-42f8-be07-35176d2efb3f","Type":"ContainerDied","Data":"00d3954d578416e3523622850463bea7a748f4d32dc642587a2fc70506b5a6f0"} Dec 03 10:18:41 crc kubenswrapper[4576]: I1203 10:18:41.810550 4576 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 10:18:43 crc kubenswrapper[4576]: I1203 10:18:43.841220 4576 generic.go:334] "Generic (PLEG): container finished" podID="f7c5fcc8-860d-42f8-be07-35176d2efb3f" containerID="1fa2941408fe00a4cae4dc1802166de7ff54b2c438b33737a777228615783f26" exitCode=0 Dec 03 10:18:43 crc kubenswrapper[4576]: I1203 10:18:43.841297 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-htzhb" event={"ID":"f7c5fcc8-860d-42f8-be07-35176d2efb3f","Type":"ContainerDied","Data":"1fa2941408fe00a4cae4dc1802166de7ff54b2c438b33737a777228615783f26"} Dec 03 10:18:46 crc kubenswrapper[4576]: I1203 10:18:46.871674 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-htzhb" event={"ID":"f7c5fcc8-860d-42f8-be07-35176d2efb3f","Type":"ContainerStarted","Data":"34d6dd2599488a51eade624915111609b7c75a3a7486311be6751486aa5d323a"} Dec 03 10:18:46 crc kubenswrapper[4576]: I1203 10:18:46.908634 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-htzhb" podStartSLOduration=3.942072712 podStartE2EDuration="7.908606854s" podCreationTimestamp="2025-12-03 10:18:39 +0000 UTC" firstStartedPulling="2025-12-03 10:18:41.810233804 +0000 UTC m=+5929.196210788" lastFinishedPulling="2025-12-03 10:18:45.776767946 +0000 UTC m=+5933.162744930" observedRunningTime="2025-12-03 10:18:46.895611034 +0000 UTC m=+5934.281588018" watchObservedRunningTime="2025-12-03 10:18:46.908606854 +0000 UTC m=+5934.294583838" Dec 03 10:18:50 crc kubenswrapper[4576]: I1203 10:18:50.190904 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-htzhb" Dec 03 10:18:50 crc kubenswrapper[4576]: I1203 10:18:50.191375 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/redhat-marketplace-htzhb" Dec 03 10:18:50 crc kubenswrapper[4576]: I1203 10:18:50.259037 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-htzhb" Dec 03 10:18:50 crc kubenswrapper[4576]: I1203 10:18:50.978795 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-htzhb" Dec 03 10:18:51 crc kubenswrapper[4576]: I1203 10:18:51.066110 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-htzhb"] Dec 03 10:18:52 crc kubenswrapper[4576]: I1203 10:18:52.678164 4576 scope.go:117] "RemoveContainer" containerID="7702a114a87e2a5a796edf3c4a450662bfd88a4ff9af32af69c6458ff530891a" Dec 03 10:18:52 crc kubenswrapper[4576]: E1203 10:18:52.678646 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:18:52 crc kubenswrapper[4576]: I1203 10:18:52.923639 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-htzhb" podUID="f7c5fcc8-860d-42f8-be07-35176d2efb3f" containerName="registry-server" containerID="cri-o://34d6dd2599488a51eade624915111609b7c75a3a7486311be6751486aa5d323a" gracePeriod=2 Dec 03 10:18:53 crc kubenswrapper[4576]: I1203 10:18:53.952159 4576 generic.go:334] "Generic (PLEG): container finished" podID="f7c5fcc8-860d-42f8-be07-35176d2efb3f" containerID="34d6dd2599488a51eade624915111609b7c75a3a7486311be6751486aa5d323a" exitCode=0 Dec 03 10:18:53 crc kubenswrapper[4576]: I1203 10:18:53.952206 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-htzhb" event={"ID":"f7c5fcc8-860d-42f8-be07-35176d2efb3f","Type":"ContainerDied","Data":"34d6dd2599488a51eade624915111609b7c75a3a7486311be6751486aa5d323a"} Dec 03 10:18:54 crc kubenswrapper[4576]: I1203 10:18:54.125425 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-htzhb" Dec 03 10:18:54 crc kubenswrapper[4576]: I1203 10:18:54.284872 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f7c5fcc8-860d-42f8-be07-35176d2efb3f-catalog-content\") pod \"f7c5fcc8-860d-42f8-be07-35176d2efb3f\" (UID: \"f7c5fcc8-860d-42f8-be07-35176d2efb3f\") " Dec 03 10:18:54 crc kubenswrapper[4576]: I1203 10:18:54.285420 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f7c5fcc8-860d-42f8-be07-35176d2efb3f-utilities\") pod \"f7c5fcc8-860d-42f8-be07-35176d2efb3f\" (UID: \"f7c5fcc8-860d-42f8-be07-35176d2efb3f\") " Dec 03 10:18:54 crc kubenswrapper[4576]: I1203 10:18:54.285472 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qh757\" (UniqueName: \"kubernetes.io/projected/f7c5fcc8-860d-42f8-be07-35176d2efb3f-kube-api-access-qh757\") pod \"f7c5fcc8-860d-42f8-be07-35176d2efb3f\" (UID: \"f7c5fcc8-860d-42f8-be07-35176d2efb3f\") " Dec 03 10:18:54 crc kubenswrapper[4576]: I1203 10:18:54.287301 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f7c5fcc8-860d-42f8-be07-35176d2efb3f-utilities" (OuterVolumeSpecName: "utilities") pod "f7c5fcc8-860d-42f8-be07-35176d2efb3f" (UID: "f7c5fcc8-860d-42f8-be07-35176d2efb3f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 10:18:54 crc kubenswrapper[4576]: I1203 10:18:54.306637 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7c5fcc8-860d-42f8-be07-35176d2efb3f-kube-api-access-qh757" (OuterVolumeSpecName: "kube-api-access-qh757") pod "f7c5fcc8-860d-42f8-be07-35176d2efb3f" (UID: "f7c5fcc8-860d-42f8-be07-35176d2efb3f"). InnerVolumeSpecName "kube-api-access-qh757". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 10:18:54 crc kubenswrapper[4576]: I1203 10:18:54.308460 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f7c5fcc8-860d-42f8-be07-35176d2efb3f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f7c5fcc8-860d-42f8-be07-35176d2efb3f" (UID: "f7c5fcc8-860d-42f8-be07-35176d2efb3f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 10:18:54 crc kubenswrapper[4576]: I1203 10:18:54.388489 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f7c5fcc8-860d-42f8-be07-35176d2efb3f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 10:18:54 crc kubenswrapper[4576]: I1203 10:18:54.388705 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f7c5fcc8-860d-42f8-be07-35176d2efb3f-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 10:18:54 crc kubenswrapper[4576]: I1203 10:18:54.388722 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qh757\" (UniqueName: \"kubernetes.io/projected/f7c5fcc8-860d-42f8-be07-35176d2efb3f-kube-api-access-qh757\") on node \"crc\" DevicePath \"\"" Dec 03 10:18:54 crc kubenswrapper[4576]: I1203 10:18:54.963111 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-htzhb" event={"ID":"f7c5fcc8-860d-42f8-be07-35176d2efb3f","Type":"ContainerDied","Data":"ac7fabe18d6df0bc0a8cf4bcfe7dcc61d0029f6ea4b6ab3e8ad9c905c954636f"} Dec 03 10:18:54 crc kubenswrapper[4576]: I1203 10:18:54.963194 4576 scope.go:117] "RemoveContainer" containerID="34d6dd2599488a51eade624915111609b7c75a3a7486311be6751486aa5d323a" Dec 03 10:18:54 crc kubenswrapper[4576]: I1203 10:18:54.963198 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-htzhb" Dec 03 10:18:54 crc kubenswrapper[4576]: I1203 10:18:54.995719 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-htzhb"] Dec 03 10:18:54 crc kubenswrapper[4576]: I1203 10:18:54.997356 4576 scope.go:117] "RemoveContainer" containerID="1fa2941408fe00a4cae4dc1802166de7ff54b2c438b33737a777228615783f26" Dec 03 10:18:55 crc kubenswrapper[4576]: I1203 10:18:55.010684 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-htzhb"] Dec 03 10:18:55 crc kubenswrapper[4576]: I1203 10:18:55.046710 4576 scope.go:117] "RemoveContainer" containerID="00d3954d578416e3523622850463bea7a748f4d32dc642587a2fc70506b5a6f0" Dec 03 10:18:55 crc kubenswrapper[4576]: I1203 10:18:55.687095 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f7c5fcc8-860d-42f8-be07-35176d2efb3f" path="/var/lib/kubelet/pods/f7c5fcc8-860d-42f8-be07-35176d2efb3f/volumes" Dec 03 10:19:04 crc kubenswrapper[4576]: I1203 10:19:04.678123 4576 scope.go:117] "RemoveContainer" containerID="7702a114a87e2a5a796edf3c4a450662bfd88a4ff9af32af69c6458ff530891a" Dec 03 10:19:04 crc kubenswrapper[4576]: E1203 10:19:04.678931 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:19:17 crc kubenswrapper[4576]: I1203 10:19:17.677910 4576 scope.go:117] "RemoveContainer" containerID="7702a114a87e2a5a796edf3c4a450662bfd88a4ff9af32af69c6458ff530891a" Dec 03 10:19:17 crc kubenswrapper[4576]: E1203 10:19:17.678695 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:19:28 crc kubenswrapper[4576]: I1203 10:19:28.677802 4576 scope.go:117] "RemoveContainer" containerID="7702a114a87e2a5a796edf3c4a450662bfd88a4ff9af32af69c6458ff530891a" Dec 03 10:19:28 crc kubenswrapper[4576]: E1203 10:19:28.678658 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:19:40 crc kubenswrapper[4576]: I1203 10:19:40.679595 4576 scope.go:117] "RemoveContainer" containerID="7702a114a87e2a5a796edf3c4a450662bfd88a4ff9af32af69c6458ff530891a" Dec 03 10:19:40 crc kubenswrapper[4576]: E1203 10:19:40.680335 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:19:55 crc kubenswrapper[4576]: I1203 10:19:55.682604 4576 scope.go:117] "RemoveContainer" containerID="7702a114a87e2a5a796edf3c4a450662bfd88a4ff9af32af69c6458ff530891a" Dec 03 10:19:55 crc kubenswrapper[4576]: E1203 10:19:55.683228 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:20:10 crc kubenswrapper[4576]: I1203 10:20:10.678731 4576 scope.go:117] "RemoveContainer" containerID="7702a114a87e2a5a796edf3c4a450662bfd88a4ff9af32af69c6458ff530891a" Dec 03 10:20:10 crc kubenswrapper[4576]: E1203 10:20:10.679747 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:20:25 crc kubenswrapper[4576]: I1203 10:20:25.681644 4576 scope.go:117] "RemoveContainer" containerID="7702a114a87e2a5a796edf3c4a450662bfd88a4ff9af32af69c6458ff530891a" Dec 03 10:20:25 crc kubenswrapper[4576]: E1203 10:20:25.682499 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:20:35 crc kubenswrapper[4576]: I1203 10:20:35.421748 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-l9xct"] Dec 03 10:20:35 crc kubenswrapper[4576]: E1203 10:20:35.422842 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7c5fcc8-860d-42f8-be07-35176d2efb3f" containerName="extract-utilities" Dec 03 10:20:35 crc kubenswrapper[4576]: I1203 10:20:35.422863 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7c5fcc8-860d-42f8-be07-35176d2efb3f" containerName="extract-utilities" Dec 03 10:20:35 crc kubenswrapper[4576]: E1203 10:20:35.422881 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7c5fcc8-860d-42f8-be07-35176d2efb3f" containerName="extract-content" Dec 03 10:20:35 crc kubenswrapper[4576]: I1203 10:20:35.422890 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7c5fcc8-860d-42f8-be07-35176d2efb3f" containerName="extract-content" Dec 03 10:20:35 crc kubenswrapper[4576]: E1203 10:20:35.422922 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7c5fcc8-860d-42f8-be07-35176d2efb3f" containerName="registry-server" Dec 03 10:20:35 crc kubenswrapper[4576]: I1203 10:20:35.422930 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7c5fcc8-860d-42f8-be07-35176d2efb3f" containerName="registry-server" Dec 03 10:20:35 crc kubenswrapper[4576]: I1203 10:20:35.423159 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7c5fcc8-860d-42f8-be07-35176d2efb3f" containerName="registry-server" Dec 03 10:20:35 crc kubenswrapper[4576]: I1203 10:20:35.425217 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-l9xct" Dec 03 10:20:35 crc kubenswrapper[4576]: I1203 10:20:35.437099 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-l9xct"] Dec 03 10:20:35 crc kubenswrapper[4576]: I1203 10:20:35.618728 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75p5n\" (UniqueName: \"kubernetes.io/projected/aadd7133-770d-4d6f-aaf4-dc7778fc0b53-kube-api-access-75p5n\") pod \"community-operators-l9xct\" (UID: \"aadd7133-770d-4d6f-aaf4-dc7778fc0b53\") " pod="openshift-marketplace/community-operators-l9xct" Dec 03 10:20:35 crc kubenswrapper[4576]: I1203 10:20:35.618772 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aadd7133-770d-4d6f-aaf4-dc7778fc0b53-catalog-content\") pod \"community-operators-l9xct\" (UID: \"aadd7133-770d-4d6f-aaf4-dc7778fc0b53\") " pod="openshift-marketplace/community-operators-l9xct" Dec 03 10:20:35 crc kubenswrapper[4576]: I1203 10:20:35.618814 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aadd7133-770d-4d6f-aaf4-dc7778fc0b53-utilities\") pod \"community-operators-l9xct\" (UID: \"aadd7133-770d-4d6f-aaf4-dc7778fc0b53\") " pod="openshift-marketplace/community-operators-l9xct" Dec 03 10:20:35 crc kubenswrapper[4576]: I1203 10:20:35.720645 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75p5n\" (UniqueName: \"kubernetes.io/projected/aadd7133-770d-4d6f-aaf4-dc7778fc0b53-kube-api-access-75p5n\") pod \"community-operators-l9xct\" (UID: \"aadd7133-770d-4d6f-aaf4-dc7778fc0b53\") " pod="openshift-marketplace/community-operators-l9xct" Dec 03 10:20:35 crc kubenswrapper[4576]: I1203 10:20:35.720940 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aadd7133-770d-4d6f-aaf4-dc7778fc0b53-catalog-content\") pod \"community-operators-l9xct\" (UID: \"aadd7133-770d-4d6f-aaf4-dc7778fc0b53\") " pod="openshift-marketplace/community-operators-l9xct" Dec 03 10:20:35 crc kubenswrapper[4576]: I1203 10:20:35.720981 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aadd7133-770d-4d6f-aaf4-dc7778fc0b53-utilities\") pod \"community-operators-l9xct\" (UID: \"aadd7133-770d-4d6f-aaf4-dc7778fc0b53\") " pod="openshift-marketplace/community-operators-l9xct" Dec 03 10:20:35 crc kubenswrapper[4576]: I1203 10:20:35.721508 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aadd7133-770d-4d6f-aaf4-dc7778fc0b53-catalog-content\") pod \"community-operators-l9xct\" (UID: \"aadd7133-770d-4d6f-aaf4-dc7778fc0b53\") " pod="openshift-marketplace/community-operators-l9xct" Dec 03 10:20:35 crc kubenswrapper[4576]: I1203 10:20:35.721676 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aadd7133-770d-4d6f-aaf4-dc7778fc0b53-utilities\") pod \"community-operators-l9xct\" (UID: \"aadd7133-770d-4d6f-aaf4-dc7778fc0b53\") " pod="openshift-marketplace/community-operators-l9xct" Dec 03 10:20:35 crc kubenswrapper[4576]: I1203 10:20:35.757944 4576 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-75p5n\" (UniqueName: \"kubernetes.io/projected/aadd7133-770d-4d6f-aaf4-dc7778fc0b53-kube-api-access-75p5n\") pod \"community-operators-l9xct\" (UID: \"aadd7133-770d-4d6f-aaf4-dc7778fc0b53\") " pod="openshift-marketplace/community-operators-l9xct" Dec 03 10:20:36 crc kubenswrapper[4576]: I1203 10:20:36.050088 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-l9xct" Dec 03 10:20:36 crc kubenswrapper[4576]: I1203 10:20:36.588734 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-l9xct"] Dec 03 10:20:37 crc kubenswrapper[4576]: I1203 10:20:37.359173 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l9xct" event={"ID":"aadd7133-770d-4d6f-aaf4-dc7778fc0b53","Type":"ContainerStarted","Data":"a8a497a31f6f0c7f5e65d8dd983002dbe9e2eb938c0ccf822bee15e2ab342831"} Dec 03 10:20:37 crc kubenswrapper[4576]: I1203 10:20:37.359792 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l9xct" event={"ID":"aadd7133-770d-4d6f-aaf4-dc7778fc0b53","Type":"ContainerStarted","Data":"49c50aa131232c17bfc53da4af7d8a23444611069d3d4965814f2f3f5d923b5a"} Dec 03 10:20:38 crc kubenswrapper[4576]: I1203 10:20:38.373779 4576 generic.go:334] "Generic (PLEG): container finished" podID="aadd7133-770d-4d6f-aaf4-dc7778fc0b53" containerID="a8a497a31f6f0c7f5e65d8dd983002dbe9e2eb938c0ccf822bee15e2ab342831" exitCode=0 Dec 03 10:20:38 crc kubenswrapper[4576]: I1203 10:20:38.373859 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l9xct" event={"ID":"aadd7133-770d-4d6f-aaf4-dc7778fc0b53","Type":"ContainerDied","Data":"a8a497a31f6f0c7f5e65d8dd983002dbe9e2eb938c0ccf822bee15e2ab342831"} Dec 03 10:20:38 crc kubenswrapper[4576]: I1203 10:20:38.678442 4576 scope.go:117] "RemoveContainer" containerID="7702a114a87e2a5a796edf3c4a450662bfd88a4ff9af32af69c6458ff530891a" Dec 03 10:20:38 crc kubenswrapper[4576]: E1203 10:20:38.679029 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:20:40 crc kubenswrapper[4576]: I1203 10:20:40.397390 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l9xct" event={"ID":"aadd7133-770d-4d6f-aaf4-dc7778fc0b53","Type":"ContainerStarted","Data":"14c3a3ddd6ae59b3648ddb0081d39ceeb0a3b9bdaa3c4096e6e151a5a180ddbd"} Dec 03 10:20:41 crc kubenswrapper[4576]: I1203 10:20:41.410645 4576 generic.go:334] "Generic (PLEG): container finished" podID="aadd7133-770d-4d6f-aaf4-dc7778fc0b53" containerID="14c3a3ddd6ae59b3648ddb0081d39ceeb0a3b9bdaa3c4096e6e151a5a180ddbd" exitCode=0 Dec 03 10:20:41 crc kubenswrapper[4576]: I1203 10:20:41.410713 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l9xct" event={"ID":"aadd7133-770d-4d6f-aaf4-dc7778fc0b53","Type":"ContainerDied","Data":"14c3a3ddd6ae59b3648ddb0081d39ceeb0a3b9bdaa3c4096e6e151a5a180ddbd"} Dec 03 10:20:43 crc kubenswrapper[4576]: I1203 10:20:43.429366 4576 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l9xct" event={"ID":"aadd7133-770d-4d6f-aaf4-dc7778fc0b53","Type":"ContainerStarted","Data":"02a4fed9da0c7ebf1f804af14c2c91fd76a74728be6a40c65ae9f5ac32ace83e"} Dec 03 10:20:43 crc kubenswrapper[4576]: I1203 10:20:43.460019 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-l9xct" podStartSLOduration=4.260823141 podStartE2EDuration="8.459990328s" podCreationTimestamp="2025-12-03 10:20:35 +0000 UTC" firstStartedPulling="2025-12-03 10:20:38.377062724 +0000 UTC m=+6045.763039718" lastFinishedPulling="2025-12-03 10:20:42.576229921 +0000 UTC m=+6049.962206905" observedRunningTime="2025-12-03 10:20:43.452722512 +0000 UTC m=+6050.838699496" watchObservedRunningTime="2025-12-03 10:20:43.459990328 +0000 UTC m=+6050.845967322" Dec 03 10:20:46 crc kubenswrapper[4576]: I1203 10:20:46.052222 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-l9xct" Dec 03 10:20:46 crc kubenswrapper[4576]: I1203 10:20:46.052803 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-l9xct" Dec 03 10:20:46 crc kubenswrapper[4576]: I1203 10:20:46.132129 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-l9xct" Dec 03 10:20:49 crc kubenswrapper[4576]: I1203 10:20:49.530209 4576 generic.go:334] "Generic (PLEG): container finished" podID="4e72ede7-9d4a-4ece-a2e0-72131da68420" containerID="6370799c1ec6c212111c1db9a9fdab87fa28e0cb68470107ac49f67f67e7de3c" exitCode=0 Dec 03 10:20:49 crc kubenswrapper[4576]: I1203 10:20:49.530356 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8vjbs/must-gather-gp9vq" event={"ID":"4e72ede7-9d4a-4ece-a2e0-72131da68420","Type":"ContainerDied","Data":"6370799c1ec6c212111c1db9a9fdab87fa28e0cb68470107ac49f67f67e7de3c"} Dec 03 10:20:49 crc kubenswrapper[4576]: I1203 10:20:49.531812 4576 scope.go:117] "RemoveContainer" containerID="6370799c1ec6c212111c1db9a9fdab87fa28e0cb68470107ac49f67f67e7de3c" Dec 03 10:20:49 crc kubenswrapper[4576]: I1203 10:20:49.685330 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-8vjbs_must-gather-gp9vq_4e72ede7-9d4a-4ece-a2e0-72131da68420/gather/0.log" Dec 03 10:20:53 crc kubenswrapper[4576]: I1203 10:20:53.684439 4576 scope.go:117] "RemoveContainer" containerID="7702a114a87e2a5a796edf3c4a450662bfd88a4ff9af32af69c6458ff530891a" Dec 03 10:20:53 crc kubenswrapper[4576]: E1203 10:20:53.685262 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:20:56 crc kubenswrapper[4576]: I1203 10:20:56.234485 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-l9xct" Dec 03 10:20:56 crc kubenswrapper[4576]: I1203 10:20:56.319771 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-l9xct"] Dec 03 10:20:56 crc kubenswrapper[4576]: I1203 10:20:56.598977 4576 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-l9xct" podUID="aadd7133-770d-4d6f-aaf4-dc7778fc0b53" containerName="registry-server" containerID="cri-o://02a4fed9da0c7ebf1f804af14c2c91fd76a74728be6a40c65ae9f5ac32ace83e" gracePeriod=2 Dec 03 10:20:57 crc kubenswrapper[4576]: I1203 10:20:57.581750 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-l9xct" Dec 03 10:20:57 crc kubenswrapper[4576]: I1203 10:20:57.626181 4576 generic.go:334] "Generic (PLEG): container finished" podID="aadd7133-770d-4d6f-aaf4-dc7778fc0b53" containerID="02a4fed9da0c7ebf1f804af14c2c91fd76a74728be6a40c65ae9f5ac32ace83e" exitCode=0 Dec 03 10:20:57 crc kubenswrapper[4576]: I1203 10:20:57.626233 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l9xct" event={"ID":"aadd7133-770d-4d6f-aaf4-dc7778fc0b53","Type":"ContainerDied","Data":"02a4fed9da0c7ebf1f804af14c2c91fd76a74728be6a40c65ae9f5ac32ace83e"} Dec 03 10:20:57 crc kubenswrapper[4576]: I1203 10:20:57.626264 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l9xct" event={"ID":"aadd7133-770d-4d6f-aaf4-dc7778fc0b53","Type":"ContainerDied","Data":"49c50aa131232c17bfc53da4af7d8a23444611069d3d4965814f2f3f5d923b5a"} Dec 03 10:20:57 crc kubenswrapper[4576]: I1203 10:20:57.626305 4576 scope.go:117] "RemoveContainer" containerID="02a4fed9da0c7ebf1f804af14c2c91fd76a74728be6a40c65ae9f5ac32ace83e" Dec 03 10:20:57 crc kubenswrapper[4576]: I1203 10:20:57.626471 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-l9xct" Dec 03 10:20:57 crc kubenswrapper[4576]: I1203 10:20:57.730264 4576 scope.go:117] "RemoveContainer" containerID="14c3a3ddd6ae59b3648ddb0081d39ceeb0a3b9bdaa3c4096e6e151a5a180ddbd" Dec 03 10:20:57 crc kubenswrapper[4576]: I1203 10:20:57.754926 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aadd7133-770d-4d6f-aaf4-dc7778fc0b53-utilities" (OuterVolumeSpecName: "utilities") pod "aadd7133-770d-4d6f-aaf4-dc7778fc0b53" (UID: "aadd7133-770d-4d6f-aaf4-dc7778fc0b53"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 10:20:57 crc kubenswrapper[4576]: I1203 10:20:57.753001 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aadd7133-770d-4d6f-aaf4-dc7778fc0b53-utilities\") pod \"aadd7133-770d-4d6f-aaf4-dc7778fc0b53\" (UID: \"aadd7133-770d-4d6f-aaf4-dc7778fc0b53\") " Dec 03 10:20:57 crc kubenswrapper[4576]: I1203 10:20:57.755056 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-75p5n\" (UniqueName: \"kubernetes.io/projected/aadd7133-770d-4d6f-aaf4-dc7778fc0b53-kube-api-access-75p5n\") pod \"aadd7133-770d-4d6f-aaf4-dc7778fc0b53\" (UID: \"aadd7133-770d-4d6f-aaf4-dc7778fc0b53\") " Dec 03 10:20:57 crc kubenswrapper[4576]: I1203 10:20:57.755229 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aadd7133-770d-4d6f-aaf4-dc7778fc0b53-catalog-content\") pod \"aadd7133-770d-4d6f-aaf4-dc7778fc0b53\" (UID: \"aadd7133-770d-4d6f-aaf4-dc7778fc0b53\") " Dec 03 10:20:57 crc kubenswrapper[4576]: I1203 10:20:57.755740 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aadd7133-770d-4d6f-aaf4-dc7778fc0b53-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 10:20:57 crc kubenswrapper[4576]: I1203 10:20:57.762883 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aadd7133-770d-4d6f-aaf4-dc7778fc0b53-kube-api-access-75p5n" (OuterVolumeSpecName: "kube-api-access-75p5n") pod "aadd7133-770d-4d6f-aaf4-dc7778fc0b53" (UID: "aadd7133-770d-4d6f-aaf4-dc7778fc0b53"). InnerVolumeSpecName "kube-api-access-75p5n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 10:20:57 crc kubenswrapper[4576]: I1203 10:20:57.763877 4576 scope.go:117] "RemoveContainer" containerID="a8a497a31f6f0c7f5e65d8dd983002dbe9e2eb938c0ccf822bee15e2ab342831" Dec 03 10:20:57 crc kubenswrapper[4576]: I1203 10:20:57.804802 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aadd7133-770d-4d6f-aaf4-dc7778fc0b53-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "aadd7133-770d-4d6f-aaf4-dc7778fc0b53" (UID: "aadd7133-770d-4d6f-aaf4-dc7778fc0b53"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 10:20:57 crc kubenswrapper[4576]: I1203 10:20:57.851461 4576 scope.go:117] "RemoveContainer" containerID="02a4fed9da0c7ebf1f804af14c2c91fd76a74728be6a40c65ae9f5ac32ace83e" Dec 03 10:20:57 crc kubenswrapper[4576]: E1203 10:20:57.855296 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"02a4fed9da0c7ebf1f804af14c2c91fd76a74728be6a40c65ae9f5ac32ace83e\": container with ID starting with 02a4fed9da0c7ebf1f804af14c2c91fd76a74728be6a40c65ae9f5ac32ace83e not found: ID does not exist" containerID="02a4fed9da0c7ebf1f804af14c2c91fd76a74728be6a40c65ae9f5ac32ace83e" Dec 03 10:20:57 crc kubenswrapper[4576]: I1203 10:20:57.855342 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"02a4fed9da0c7ebf1f804af14c2c91fd76a74728be6a40c65ae9f5ac32ace83e"} err="failed to get container status \"02a4fed9da0c7ebf1f804af14c2c91fd76a74728be6a40c65ae9f5ac32ace83e\": rpc error: code = NotFound desc = could not find container \"02a4fed9da0c7ebf1f804af14c2c91fd76a74728be6a40c65ae9f5ac32ace83e\": container with ID starting with 02a4fed9da0c7ebf1f804af14c2c91fd76a74728be6a40c65ae9f5ac32ace83e not found: ID does not exist" Dec 03 10:20:57 crc kubenswrapper[4576]: I1203 10:20:57.855369 4576 scope.go:117] "RemoveContainer" containerID="14c3a3ddd6ae59b3648ddb0081d39ceeb0a3b9bdaa3c4096e6e151a5a180ddbd" Dec 03 10:20:57 crc kubenswrapper[4576]: E1203 10:20:57.856029 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"14c3a3ddd6ae59b3648ddb0081d39ceeb0a3b9bdaa3c4096e6e151a5a180ddbd\": container with ID starting with 14c3a3ddd6ae59b3648ddb0081d39ceeb0a3b9bdaa3c4096e6e151a5a180ddbd not found: ID does not exist" containerID="14c3a3ddd6ae59b3648ddb0081d39ceeb0a3b9bdaa3c4096e6e151a5a180ddbd" Dec 03 10:20:57 crc kubenswrapper[4576]: I1203 10:20:57.856070 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14c3a3ddd6ae59b3648ddb0081d39ceeb0a3b9bdaa3c4096e6e151a5a180ddbd"} err="failed to get container status \"14c3a3ddd6ae59b3648ddb0081d39ceeb0a3b9bdaa3c4096e6e151a5a180ddbd\": rpc error: code = NotFound desc = could not find container \"14c3a3ddd6ae59b3648ddb0081d39ceeb0a3b9bdaa3c4096e6e151a5a180ddbd\": container with ID starting with 14c3a3ddd6ae59b3648ddb0081d39ceeb0a3b9bdaa3c4096e6e151a5a180ddbd not found: ID does not exist" Dec 03 10:20:57 crc kubenswrapper[4576]: I1203 10:20:57.856095 4576 scope.go:117] "RemoveContainer" containerID="a8a497a31f6f0c7f5e65d8dd983002dbe9e2eb938c0ccf822bee15e2ab342831" Dec 03 10:20:57 crc kubenswrapper[4576]: E1203 10:20:57.856595 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a8a497a31f6f0c7f5e65d8dd983002dbe9e2eb938c0ccf822bee15e2ab342831\": container with ID starting with a8a497a31f6f0c7f5e65d8dd983002dbe9e2eb938c0ccf822bee15e2ab342831 not found: ID does not exist" containerID="a8a497a31f6f0c7f5e65d8dd983002dbe9e2eb938c0ccf822bee15e2ab342831" Dec 03 10:20:57 crc kubenswrapper[4576]: I1203 10:20:57.856623 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8a497a31f6f0c7f5e65d8dd983002dbe9e2eb938c0ccf822bee15e2ab342831"} err="failed to get container status \"a8a497a31f6f0c7f5e65d8dd983002dbe9e2eb938c0ccf822bee15e2ab342831\": rpc error: code = NotFound desc = could not 
find container \"a8a497a31f6f0c7f5e65d8dd983002dbe9e2eb938c0ccf822bee15e2ab342831\": container with ID starting with a8a497a31f6f0c7f5e65d8dd983002dbe9e2eb938c0ccf822bee15e2ab342831 not found: ID does not exist" Dec 03 10:20:57 crc kubenswrapper[4576]: I1203 10:20:57.858075 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aadd7133-770d-4d6f-aaf4-dc7778fc0b53-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 10:20:57 crc kubenswrapper[4576]: I1203 10:20:57.858110 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-75p5n\" (UniqueName: \"kubernetes.io/projected/aadd7133-770d-4d6f-aaf4-dc7778fc0b53-kube-api-access-75p5n\") on node \"crc\" DevicePath \"\"" Dec 03 10:20:57 crc kubenswrapper[4576]: I1203 10:20:57.966590 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-l9xct"] Dec 03 10:20:57 crc kubenswrapper[4576]: I1203 10:20:57.981060 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-l9xct"] Dec 03 10:20:59 crc kubenswrapper[4576]: I1203 10:20:59.025096 4576 scope.go:117] "RemoveContainer" containerID="91fd6392d4dc13f4b14280a98893e10b8e3d3444a857b4057cf40e969d852e75" Dec 03 10:20:59 crc kubenswrapper[4576]: I1203 10:20:59.687032 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aadd7133-770d-4d6f-aaf4-dc7778fc0b53" path="/var/lib/kubelet/pods/aadd7133-770d-4d6f-aaf4-dc7778fc0b53/volumes" Dec 03 10:21:02 crc kubenswrapper[4576]: I1203 10:21:02.518179 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-8vjbs/must-gather-gp9vq"] Dec 03 10:21:02 crc kubenswrapper[4576]: I1203 10:21:02.518936 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-8vjbs/must-gather-gp9vq" podUID="4e72ede7-9d4a-4ece-a2e0-72131da68420" containerName="copy" containerID="cri-o://1ce2798a9833b85f18cd451299b9bab47e87608023f5218f96831ba210852239" gracePeriod=2 Dec 03 10:21:02 crc kubenswrapper[4576]: I1203 10:21:02.527246 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-8vjbs/must-gather-gp9vq"] Dec 03 10:21:02 crc kubenswrapper[4576]: I1203 10:21:02.677970 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-8vjbs_must-gather-gp9vq_4e72ede7-9d4a-4ece-a2e0-72131da68420/copy/0.log" Dec 03 10:21:02 crc kubenswrapper[4576]: I1203 10:21:02.678638 4576 generic.go:334] "Generic (PLEG): container finished" podID="4e72ede7-9d4a-4ece-a2e0-72131da68420" containerID="1ce2798a9833b85f18cd451299b9bab47e87608023f5218f96831ba210852239" exitCode=143 Dec 03 10:21:02 crc kubenswrapper[4576]: I1203 10:21:02.968783 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-8vjbs_must-gather-gp9vq_4e72ede7-9d4a-4ece-a2e0-72131da68420/copy/0.log" Dec 03 10:21:02 crc kubenswrapper[4576]: I1203 10:21:02.969167 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-8vjbs/must-gather-gp9vq" Dec 03 10:21:03 crc kubenswrapper[4576]: I1203 10:21:03.062848 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/4e72ede7-9d4a-4ece-a2e0-72131da68420-must-gather-output\") pod \"4e72ede7-9d4a-4ece-a2e0-72131da68420\" (UID: \"4e72ede7-9d4a-4ece-a2e0-72131da68420\") " Dec 03 10:21:03 crc kubenswrapper[4576]: I1203 10:21:03.063092 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z8mgh\" (UniqueName: \"kubernetes.io/projected/4e72ede7-9d4a-4ece-a2e0-72131da68420-kube-api-access-z8mgh\") pod \"4e72ede7-9d4a-4ece-a2e0-72131da68420\" (UID: \"4e72ede7-9d4a-4ece-a2e0-72131da68420\") " Dec 03 10:21:03 crc kubenswrapper[4576]: I1203 10:21:03.069977 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e72ede7-9d4a-4ece-a2e0-72131da68420-kube-api-access-z8mgh" (OuterVolumeSpecName: "kube-api-access-z8mgh") pod "4e72ede7-9d4a-4ece-a2e0-72131da68420" (UID: "4e72ede7-9d4a-4ece-a2e0-72131da68420"). InnerVolumeSpecName "kube-api-access-z8mgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 10:21:03 crc kubenswrapper[4576]: I1203 10:21:03.165445 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z8mgh\" (UniqueName: \"kubernetes.io/projected/4e72ede7-9d4a-4ece-a2e0-72131da68420-kube-api-access-z8mgh\") on node \"crc\" DevicePath \"\"" Dec 03 10:21:03 crc kubenswrapper[4576]: I1203 10:21:03.258452 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4e72ede7-9d4a-4ece-a2e0-72131da68420-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "4e72ede7-9d4a-4ece-a2e0-72131da68420" (UID: "4e72ede7-9d4a-4ece-a2e0-72131da68420"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 10:21:03 crc kubenswrapper[4576]: I1203 10:21:03.267519 4576 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/4e72ede7-9d4a-4ece-a2e0-72131da68420-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 03 10:21:03 crc kubenswrapper[4576]: I1203 10:21:03.687980 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e72ede7-9d4a-4ece-a2e0-72131da68420" path="/var/lib/kubelet/pods/4e72ede7-9d4a-4ece-a2e0-72131da68420/volumes" Dec 03 10:21:03 crc kubenswrapper[4576]: I1203 10:21:03.690922 4576 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-8vjbs_must-gather-gp9vq_4e72ede7-9d4a-4ece-a2e0-72131da68420/copy/0.log" Dec 03 10:21:03 crc kubenswrapper[4576]: I1203 10:21:03.691302 4576 scope.go:117] "RemoveContainer" containerID="1ce2798a9833b85f18cd451299b9bab47e87608023f5218f96831ba210852239" Dec 03 10:21:03 crc kubenswrapper[4576]: I1203 10:21:03.691365 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-8vjbs/must-gather-gp9vq" Dec 03 10:21:03 crc kubenswrapper[4576]: I1203 10:21:03.711983 4576 scope.go:117] "RemoveContainer" containerID="6370799c1ec6c212111c1db9a9fdab87fa28e0cb68470107ac49f67f67e7de3c" Dec 03 10:21:06 crc kubenswrapper[4576]: I1203 10:21:06.678747 4576 scope.go:117] "RemoveContainer" containerID="7702a114a87e2a5a796edf3c4a450662bfd88a4ff9af32af69c6458ff530891a" Dec 03 10:21:06 crc kubenswrapper[4576]: E1203 10:21:06.679593 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:21:21 crc kubenswrapper[4576]: I1203 10:21:21.679293 4576 scope.go:117] "RemoveContainer" containerID="7702a114a87e2a5a796edf3c4a450662bfd88a4ff9af32af69c6458ff530891a" Dec 03 10:21:21 crc kubenswrapper[4576]: E1203 10:21:21.682198 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:21:33 crc kubenswrapper[4576]: I1203 10:21:33.687901 4576 scope.go:117] "RemoveContainer" containerID="7702a114a87e2a5a796edf3c4a450662bfd88a4ff9af32af69c6458ff530891a" Dec 03 10:21:33 crc kubenswrapper[4576]: E1203 10:21:33.689414 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:21:47 crc kubenswrapper[4576]: I1203 10:21:47.677516 4576 scope.go:117] "RemoveContainer" containerID="7702a114a87e2a5a796edf3c4a450662bfd88a4ff9af32af69c6458ff530891a" Dec 03 10:21:47 crc kubenswrapper[4576]: E1203 10:21:47.678257 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:21:59 crc kubenswrapper[4576]: I1203 10:21:59.104213 4576 scope.go:117] "RemoveContainer" containerID="f38a90c5d0badddf3862f2449c0df8497e483848689bd37718992b1e094367c4" Dec 03 10:22:00 crc kubenswrapper[4576]: I1203 10:22:00.677715 4576 scope.go:117] "RemoveContainer" containerID="7702a114a87e2a5a796edf3c4a450662bfd88a4ff9af32af69c6458ff530891a" Dec 03 10:22:00 crc kubenswrapper[4576]: E1203 10:22:00.678411 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:22:11 crc kubenswrapper[4576]: I1203 10:22:11.680717 4576 scope.go:117] "RemoveContainer" containerID="7702a114a87e2a5a796edf3c4a450662bfd88a4ff9af32af69c6458ff530891a" Dec 03 10:22:11 crc kubenswrapper[4576]: E1203 10:22:11.681543 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:22:22 crc kubenswrapper[4576]: I1203 10:22:22.677852 4576 scope.go:117] "RemoveContainer" containerID="7702a114a87e2a5a796edf3c4a450662bfd88a4ff9af32af69c6458ff530891a" Dec 03 10:22:22 crc kubenswrapper[4576]: E1203 10:22:22.679207 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:22:35 crc kubenswrapper[4576]: I1203 10:22:35.678498 4576 scope.go:117] "RemoveContainer" containerID="7702a114a87e2a5a796edf3c4a450662bfd88a4ff9af32af69c6458ff530891a" Dec 03 10:22:35 crc kubenswrapper[4576]: E1203 10:22:35.679350 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:22:50 crc kubenswrapper[4576]: I1203 10:22:50.678182 4576 scope.go:117] "RemoveContainer" containerID="7702a114a87e2a5a796edf3c4a450662bfd88a4ff9af32af69c6458ff530891a" Dec 03 10:22:50 crc kubenswrapper[4576]: E1203 10:22:50.678963 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:22:53 crc kubenswrapper[4576]: I1203 10:22:53.606253 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-t79kd"] Dec 03 10:22:53 crc kubenswrapper[4576]: E1203 10:22:53.607356 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e72ede7-9d4a-4ece-a2e0-72131da68420" containerName="gather" Dec 03 10:22:53 crc kubenswrapper[4576]: I1203 10:22:53.607374 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e72ede7-9d4a-4ece-a2e0-72131da68420" containerName="gather" Dec 03 10:22:53 crc kubenswrapper[4576]: E1203 10:22:53.607394 
4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aadd7133-770d-4d6f-aaf4-dc7778fc0b53" containerName="extract-content" Dec 03 10:22:53 crc kubenswrapper[4576]: I1203 10:22:53.607403 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="aadd7133-770d-4d6f-aaf4-dc7778fc0b53" containerName="extract-content" Dec 03 10:22:53 crc kubenswrapper[4576]: E1203 10:22:53.607435 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e72ede7-9d4a-4ece-a2e0-72131da68420" containerName="copy" Dec 03 10:22:53 crc kubenswrapper[4576]: I1203 10:22:53.607446 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e72ede7-9d4a-4ece-a2e0-72131da68420" containerName="copy" Dec 03 10:22:53 crc kubenswrapper[4576]: E1203 10:22:53.607461 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aadd7133-770d-4d6f-aaf4-dc7778fc0b53" containerName="extract-utilities" Dec 03 10:22:53 crc kubenswrapper[4576]: I1203 10:22:53.607470 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="aadd7133-770d-4d6f-aaf4-dc7778fc0b53" containerName="extract-utilities" Dec 03 10:22:53 crc kubenswrapper[4576]: E1203 10:22:53.607500 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aadd7133-770d-4d6f-aaf4-dc7778fc0b53" containerName="registry-server" Dec 03 10:22:53 crc kubenswrapper[4576]: I1203 10:22:53.607508 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="aadd7133-770d-4d6f-aaf4-dc7778fc0b53" containerName="registry-server" Dec 03 10:22:53 crc kubenswrapper[4576]: I1203 10:22:53.607795 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e72ede7-9d4a-4ece-a2e0-72131da68420" containerName="copy" Dec 03 10:22:53 crc kubenswrapper[4576]: I1203 10:22:53.607835 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="aadd7133-770d-4d6f-aaf4-dc7778fc0b53" containerName="registry-server" Dec 03 10:22:53 crc kubenswrapper[4576]: I1203 10:22:53.607846 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e72ede7-9d4a-4ece-a2e0-72131da68420" containerName="gather" Dec 03 10:22:53 crc kubenswrapper[4576]: I1203 10:22:53.609556 4576 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-t79kd" Dec 03 10:22:53 crc kubenswrapper[4576]: I1203 10:22:53.642220 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-t79kd"] Dec 03 10:22:53 crc kubenswrapper[4576]: I1203 10:22:53.705263 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f8c0283-20b4-494d-96f3-275136528e26-utilities\") pod \"certified-operators-t79kd\" (UID: \"5f8c0283-20b4-494d-96f3-275136528e26\") " pod="openshift-marketplace/certified-operators-t79kd" Dec 03 10:22:53 crc kubenswrapper[4576]: I1203 10:22:53.705324 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-krrbl\" (UniqueName: \"kubernetes.io/projected/5f8c0283-20b4-494d-96f3-275136528e26-kube-api-access-krrbl\") pod \"certified-operators-t79kd\" (UID: \"5f8c0283-20b4-494d-96f3-275136528e26\") " pod="openshift-marketplace/certified-operators-t79kd" Dec 03 10:22:53 crc kubenswrapper[4576]: I1203 10:22:53.705542 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f8c0283-20b4-494d-96f3-275136528e26-catalog-content\") pod \"certified-operators-t79kd\" (UID: \"5f8c0283-20b4-494d-96f3-275136528e26\") " pod="openshift-marketplace/certified-operators-t79kd" Dec 03 10:22:53 crc kubenswrapper[4576]: I1203 10:22:53.807740 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f8c0283-20b4-494d-96f3-275136528e26-utilities\") pod \"certified-operators-t79kd\" (UID: \"5f8c0283-20b4-494d-96f3-275136528e26\") " pod="openshift-marketplace/certified-operators-t79kd" Dec 03 10:22:53 crc kubenswrapper[4576]: I1203 10:22:53.807844 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-krrbl\" (UniqueName: \"kubernetes.io/projected/5f8c0283-20b4-494d-96f3-275136528e26-kube-api-access-krrbl\") pod \"certified-operators-t79kd\" (UID: \"5f8c0283-20b4-494d-96f3-275136528e26\") " pod="openshift-marketplace/certified-operators-t79kd" Dec 03 10:22:53 crc kubenswrapper[4576]: I1203 10:22:53.807909 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f8c0283-20b4-494d-96f3-275136528e26-utilities\") pod \"certified-operators-t79kd\" (UID: \"5f8c0283-20b4-494d-96f3-275136528e26\") " pod="openshift-marketplace/certified-operators-t79kd" Dec 03 10:22:53 crc kubenswrapper[4576]: I1203 10:22:53.808221 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f8c0283-20b4-494d-96f3-275136528e26-catalog-content\") pod \"certified-operators-t79kd\" (UID: \"5f8c0283-20b4-494d-96f3-275136528e26\") " pod="openshift-marketplace/certified-operators-t79kd" Dec 03 10:22:53 crc kubenswrapper[4576]: I1203 10:22:53.809423 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f8c0283-20b4-494d-96f3-275136528e26-catalog-content\") pod \"certified-operators-t79kd\" (UID: \"5f8c0283-20b4-494d-96f3-275136528e26\") " pod="openshift-marketplace/certified-operators-t79kd" Dec 03 10:22:53 crc kubenswrapper[4576]: I1203 10:22:53.849154 4576 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-krrbl\" (UniqueName: \"kubernetes.io/projected/5f8c0283-20b4-494d-96f3-275136528e26-kube-api-access-krrbl\") pod \"certified-operators-t79kd\" (UID: \"5f8c0283-20b4-494d-96f3-275136528e26\") " pod="openshift-marketplace/certified-operators-t79kd" Dec 03 10:22:53 crc kubenswrapper[4576]: I1203 10:22:53.932655 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t79kd" Dec 03 10:22:54 crc kubenswrapper[4576]: I1203 10:22:54.429643 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-t79kd"] Dec 03 10:22:54 crc kubenswrapper[4576]: I1203 10:22:54.887260 4576 generic.go:334] "Generic (PLEG): container finished" podID="5f8c0283-20b4-494d-96f3-275136528e26" containerID="769a52666cda443e7a36fe1e473a58bc6fad0bfaeaf66839cecd9e1330298b8b" exitCode=0 Dec 03 10:22:54 crc kubenswrapper[4576]: I1203 10:22:54.887333 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t79kd" event={"ID":"5f8c0283-20b4-494d-96f3-275136528e26","Type":"ContainerDied","Data":"769a52666cda443e7a36fe1e473a58bc6fad0bfaeaf66839cecd9e1330298b8b"} Dec 03 10:22:54 crc kubenswrapper[4576]: I1203 10:22:54.887375 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t79kd" event={"ID":"5f8c0283-20b4-494d-96f3-275136528e26","Type":"ContainerStarted","Data":"e90f9740054e870a2a048d6d86a9846e872701978b61175086f9a8344de9b09d"} Dec 03 10:22:56 crc kubenswrapper[4576]: I1203 10:22:56.906129 4576 generic.go:334] "Generic (PLEG): container finished" podID="5f8c0283-20b4-494d-96f3-275136528e26" containerID="f222b17f8fb1139b4cc8d09833a08c6ff754e2bb877d27b2528936e42f642627" exitCode=0 Dec 03 10:22:56 crc kubenswrapper[4576]: I1203 10:22:56.906225 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t79kd" event={"ID":"5f8c0283-20b4-494d-96f3-275136528e26","Type":"ContainerDied","Data":"f222b17f8fb1139b4cc8d09833a08c6ff754e2bb877d27b2528936e42f642627"} Dec 03 10:22:57 crc kubenswrapper[4576]: I1203 10:22:57.920569 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t79kd" event={"ID":"5f8c0283-20b4-494d-96f3-275136528e26","Type":"ContainerStarted","Data":"e21a111fd81424e484bbb12a04ade1b5714d5b7d1eadb5372f0fcad268728f72"} Dec 03 10:22:57 crc kubenswrapper[4576]: I1203 10:22:57.946568 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-t79kd" podStartSLOduration=2.544296534 podStartE2EDuration="4.946520681s" podCreationTimestamp="2025-12-03 10:22:53 +0000 UTC" firstStartedPulling="2025-12-03 10:22:54.891665629 +0000 UTC m=+6182.277642663" lastFinishedPulling="2025-12-03 10:22:57.293889836 +0000 UTC m=+6184.679866810" observedRunningTime="2025-12-03 10:22:57.944634491 +0000 UTC m=+6185.330611495" watchObservedRunningTime="2025-12-03 10:22:57.946520681 +0000 UTC m=+6185.332497665" Dec 03 10:23:02 crc kubenswrapper[4576]: I1203 10:23:02.677595 4576 scope.go:117] "RemoveContainer" containerID="7702a114a87e2a5a796edf3c4a450662bfd88a4ff9af32af69c6458ff530891a" Dec 03 10:23:02 crc kubenswrapper[4576]: E1203 10:23:02.678409 4576 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-pjb2d_openshift-machine-config-operator(60b1bede-26e9-4b5d-b450-9866da685693)\"" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" podUID="60b1bede-26e9-4b5d-b450-9866da685693" Dec 03 10:23:03 crc kubenswrapper[4576]: I1203 10:23:03.933924 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-t79kd" Dec 03 10:23:03 crc kubenswrapper[4576]: I1203 10:23:03.933982 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-t79kd" Dec 03 10:23:03 crc kubenswrapper[4576]: I1203 10:23:03.982068 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-t79kd" Dec 03 10:23:04 crc kubenswrapper[4576]: I1203 10:23:04.051449 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-t79kd" Dec 03 10:23:04 crc kubenswrapper[4576]: I1203 10:23:04.222450 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-t79kd"] Dec 03 10:23:05 crc kubenswrapper[4576]: I1203 10:23:05.993923 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-t79kd" podUID="5f8c0283-20b4-494d-96f3-275136528e26" containerName="registry-server" containerID="cri-o://e21a111fd81424e484bbb12a04ade1b5714d5b7d1eadb5372f0fcad268728f72" gracePeriod=2 Dec 03 10:23:06 crc kubenswrapper[4576]: I1203 10:23:06.524790 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t79kd" Dec 03 10:23:06 crc kubenswrapper[4576]: I1203 10:23:06.571431 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f8c0283-20b4-494d-96f3-275136528e26-utilities\") pod \"5f8c0283-20b4-494d-96f3-275136528e26\" (UID: \"5f8c0283-20b4-494d-96f3-275136528e26\") " Dec 03 10:23:06 crc kubenswrapper[4576]: I1203 10:23:06.572212 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-krrbl\" (UniqueName: \"kubernetes.io/projected/5f8c0283-20b4-494d-96f3-275136528e26-kube-api-access-krrbl\") pod \"5f8c0283-20b4-494d-96f3-275136528e26\" (UID: \"5f8c0283-20b4-494d-96f3-275136528e26\") " Dec 03 10:23:06 crc kubenswrapper[4576]: I1203 10:23:06.572768 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f8c0283-20b4-494d-96f3-275136528e26-utilities" (OuterVolumeSpecName: "utilities") pod "5f8c0283-20b4-494d-96f3-275136528e26" (UID: "5f8c0283-20b4-494d-96f3-275136528e26"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 10:23:06 crc kubenswrapper[4576]: I1203 10:23:06.572895 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f8c0283-20b4-494d-96f3-275136528e26-catalog-content\") pod \"5f8c0283-20b4-494d-96f3-275136528e26\" (UID: \"5f8c0283-20b4-494d-96f3-275136528e26\") " Dec 03 10:23:06 crc kubenswrapper[4576]: I1203 10:23:06.574696 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f8c0283-20b4-494d-96f3-275136528e26-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 10:23:06 crc kubenswrapper[4576]: I1203 10:23:06.586904 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f8c0283-20b4-494d-96f3-275136528e26-kube-api-access-krrbl" (OuterVolumeSpecName: "kube-api-access-krrbl") pod "5f8c0283-20b4-494d-96f3-275136528e26" (UID: "5f8c0283-20b4-494d-96f3-275136528e26"). InnerVolumeSpecName "kube-api-access-krrbl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 10:23:06 crc kubenswrapper[4576]: I1203 10:23:06.676349 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-krrbl\" (UniqueName: \"kubernetes.io/projected/5f8c0283-20b4-494d-96f3-275136528e26-kube-api-access-krrbl\") on node \"crc\" DevicePath \"\"" Dec 03 10:23:07 crc kubenswrapper[4576]: I1203 10:23:07.009043 4576 generic.go:334] "Generic (PLEG): container finished" podID="5f8c0283-20b4-494d-96f3-275136528e26" containerID="e21a111fd81424e484bbb12a04ade1b5714d5b7d1eadb5372f0fcad268728f72" exitCode=0 Dec 03 10:23:07 crc kubenswrapper[4576]: I1203 10:23:07.009092 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t79kd" event={"ID":"5f8c0283-20b4-494d-96f3-275136528e26","Type":"ContainerDied","Data":"e21a111fd81424e484bbb12a04ade1b5714d5b7d1eadb5372f0fcad268728f72"} Dec 03 10:23:07 crc kubenswrapper[4576]: I1203 10:23:07.009116 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-t79kd" Dec 03 10:23:07 crc kubenswrapper[4576]: I1203 10:23:07.009136 4576 scope.go:117] "RemoveContainer" containerID="e21a111fd81424e484bbb12a04ade1b5714d5b7d1eadb5372f0fcad268728f72" Dec 03 10:23:07 crc kubenswrapper[4576]: I1203 10:23:07.009123 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t79kd" event={"ID":"5f8c0283-20b4-494d-96f3-275136528e26","Type":"ContainerDied","Data":"e90f9740054e870a2a048d6d86a9846e872701978b61175086f9a8344de9b09d"} Dec 03 10:23:07 crc kubenswrapper[4576]: I1203 10:23:07.034910 4576 scope.go:117] "RemoveContainer" containerID="f222b17f8fb1139b4cc8d09833a08c6ff754e2bb877d27b2528936e42f642627" Dec 03 10:23:07 crc kubenswrapper[4576]: I1203 10:23:07.063690 4576 scope.go:117] "RemoveContainer" containerID="769a52666cda443e7a36fe1e473a58bc6fad0bfaeaf66839cecd9e1330298b8b" Dec 03 10:23:07 crc kubenswrapper[4576]: I1203 10:23:07.109172 4576 scope.go:117] "RemoveContainer" containerID="e21a111fd81424e484bbb12a04ade1b5714d5b7d1eadb5372f0fcad268728f72" Dec 03 10:23:07 crc kubenswrapper[4576]: E1203 10:23:07.110018 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e21a111fd81424e484bbb12a04ade1b5714d5b7d1eadb5372f0fcad268728f72\": container with ID starting with e21a111fd81424e484bbb12a04ade1b5714d5b7d1eadb5372f0fcad268728f72 not found: ID does not exist" containerID="e21a111fd81424e484bbb12a04ade1b5714d5b7d1eadb5372f0fcad268728f72" Dec 03 10:23:07 crc kubenswrapper[4576]: I1203 10:23:07.110065 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e21a111fd81424e484bbb12a04ade1b5714d5b7d1eadb5372f0fcad268728f72"} err="failed to get container status \"e21a111fd81424e484bbb12a04ade1b5714d5b7d1eadb5372f0fcad268728f72\": rpc error: code = NotFound desc = could not find container \"e21a111fd81424e484bbb12a04ade1b5714d5b7d1eadb5372f0fcad268728f72\": container with ID starting with e21a111fd81424e484bbb12a04ade1b5714d5b7d1eadb5372f0fcad268728f72 not found: ID does not exist" Dec 03 10:23:07 crc kubenswrapper[4576]: I1203 10:23:07.110096 4576 scope.go:117] "RemoveContainer" containerID="f222b17f8fb1139b4cc8d09833a08c6ff754e2bb877d27b2528936e42f642627" Dec 03 10:23:07 crc kubenswrapper[4576]: E1203 10:23:07.110436 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f222b17f8fb1139b4cc8d09833a08c6ff754e2bb877d27b2528936e42f642627\": container with ID starting with f222b17f8fb1139b4cc8d09833a08c6ff754e2bb877d27b2528936e42f642627 not found: ID does not exist" containerID="f222b17f8fb1139b4cc8d09833a08c6ff754e2bb877d27b2528936e42f642627" Dec 03 10:23:07 crc kubenswrapper[4576]: I1203 10:23:07.110464 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f222b17f8fb1139b4cc8d09833a08c6ff754e2bb877d27b2528936e42f642627"} err="failed to get container status \"f222b17f8fb1139b4cc8d09833a08c6ff754e2bb877d27b2528936e42f642627\": rpc error: code = NotFound desc = could not find container \"f222b17f8fb1139b4cc8d09833a08c6ff754e2bb877d27b2528936e42f642627\": container with ID starting with f222b17f8fb1139b4cc8d09833a08c6ff754e2bb877d27b2528936e42f642627 not found: ID does not exist" Dec 03 10:23:07 crc kubenswrapper[4576]: I1203 10:23:07.110483 4576 scope.go:117] "RemoveContainer" 
containerID="769a52666cda443e7a36fe1e473a58bc6fad0bfaeaf66839cecd9e1330298b8b" Dec 03 10:23:07 crc kubenswrapper[4576]: E1203 10:23:07.110782 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"769a52666cda443e7a36fe1e473a58bc6fad0bfaeaf66839cecd9e1330298b8b\": container with ID starting with 769a52666cda443e7a36fe1e473a58bc6fad0bfaeaf66839cecd9e1330298b8b not found: ID does not exist" containerID="769a52666cda443e7a36fe1e473a58bc6fad0bfaeaf66839cecd9e1330298b8b" Dec 03 10:23:07 crc kubenswrapper[4576]: I1203 10:23:07.110807 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"769a52666cda443e7a36fe1e473a58bc6fad0bfaeaf66839cecd9e1330298b8b"} err="failed to get container status \"769a52666cda443e7a36fe1e473a58bc6fad0bfaeaf66839cecd9e1330298b8b\": rpc error: code = NotFound desc = could not find container \"769a52666cda443e7a36fe1e473a58bc6fad0bfaeaf66839cecd9e1330298b8b\": container with ID starting with 769a52666cda443e7a36fe1e473a58bc6fad0bfaeaf66839cecd9e1330298b8b not found: ID does not exist" Dec 03 10:23:07 crc kubenswrapper[4576]: I1203 10:23:07.135565 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f8c0283-20b4-494d-96f3-275136528e26-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5f8c0283-20b4-494d-96f3-275136528e26" (UID: "5f8c0283-20b4-494d-96f3-275136528e26"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 10:23:07 crc kubenswrapper[4576]: I1203 10:23:07.185903 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f8c0283-20b4-494d-96f3-275136528e26-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 10:23:07 crc kubenswrapper[4576]: I1203 10:23:07.347970 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-t79kd"] Dec 03 10:23:07 crc kubenswrapper[4576]: I1203 10:23:07.361508 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-t79kd"] Dec 03 10:23:07 crc kubenswrapper[4576]: I1203 10:23:07.693499 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f8c0283-20b4-494d-96f3-275136528e26" path="/var/lib/kubelet/pods/5f8c0283-20b4-494d-96f3-275136528e26/volumes" Dec 03 10:23:17 crc kubenswrapper[4576]: I1203 10:23:17.679744 4576 scope.go:117] "RemoveContainer" containerID="7702a114a87e2a5a796edf3c4a450662bfd88a4ff9af32af69c6458ff530891a" Dec 03 10:23:18 crc kubenswrapper[4576]: I1203 10:23:18.136567 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pjb2d" event={"ID":"60b1bede-26e9-4b5d-b450-9866da685693","Type":"ContainerStarted","Data":"886fd2d01f73f5561a26071ffd83ecee3927015c2f732e46ad851dd4afad905d"} Dec 03 10:24:11 crc kubenswrapper[4576]: I1203 10:24:11.538095 4576 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-kxphd"] Dec 03 10:24:11 crc kubenswrapper[4576]: E1203 10:24:11.539043 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f8c0283-20b4-494d-96f3-275136528e26" containerName="extract-utilities" Dec 03 10:24:11 crc kubenswrapper[4576]: I1203 10:24:11.539056 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f8c0283-20b4-494d-96f3-275136528e26" containerName="extract-utilities" Dec 03 10:24:11 crc 
kubenswrapper[4576]: E1203 10:24:11.539085 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f8c0283-20b4-494d-96f3-275136528e26" containerName="registry-server" Dec 03 10:24:11 crc kubenswrapper[4576]: I1203 10:24:11.539095 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f8c0283-20b4-494d-96f3-275136528e26" containerName="registry-server" Dec 03 10:24:11 crc kubenswrapper[4576]: E1203 10:24:11.539115 4576 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f8c0283-20b4-494d-96f3-275136528e26" containerName="extract-content" Dec 03 10:24:11 crc kubenswrapper[4576]: I1203 10:24:11.539122 4576 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f8c0283-20b4-494d-96f3-275136528e26" containerName="extract-content" Dec 03 10:24:11 crc kubenswrapper[4576]: I1203 10:24:11.539333 4576 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f8c0283-20b4-494d-96f3-275136528e26" containerName="registry-server" Dec 03 10:24:11 crc kubenswrapper[4576]: I1203 10:24:11.540781 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kxphd" Dec 03 10:24:11 crc kubenswrapper[4576]: I1203 10:24:11.549633 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kxphd"] Dec 03 10:24:11 crc kubenswrapper[4576]: I1203 10:24:11.595838 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d0fe4920-4f32-424e-a9bd-467aa3689506-catalog-content\") pod \"redhat-operators-kxphd\" (UID: \"d0fe4920-4f32-424e-a9bd-467aa3689506\") " pod="openshift-marketplace/redhat-operators-kxphd" Dec 03 10:24:11 crc kubenswrapper[4576]: I1203 10:24:11.596274 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d0fe4920-4f32-424e-a9bd-467aa3689506-utilities\") pod \"redhat-operators-kxphd\" (UID: \"d0fe4920-4f32-424e-a9bd-467aa3689506\") " pod="openshift-marketplace/redhat-operators-kxphd" Dec 03 10:24:11 crc kubenswrapper[4576]: I1203 10:24:11.596302 4576 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vwvbn\" (UniqueName: \"kubernetes.io/projected/d0fe4920-4f32-424e-a9bd-467aa3689506-kube-api-access-vwvbn\") pod \"redhat-operators-kxphd\" (UID: \"d0fe4920-4f32-424e-a9bd-467aa3689506\") " pod="openshift-marketplace/redhat-operators-kxphd" Dec 03 10:24:11 crc kubenswrapper[4576]: I1203 10:24:11.697832 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d0fe4920-4f32-424e-a9bd-467aa3689506-catalog-content\") pod \"redhat-operators-kxphd\" (UID: \"d0fe4920-4f32-424e-a9bd-467aa3689506\") " pod="openshift-marketplace/redhat-operators-kxphd" Dec 03 10:24:11 crc kubenswrapper[4576]: I1203 10:24:11.698012 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d0fe4920-4f32-424e-a9bd-467aa3689506-utilities\") pod \"redhat-operators-kxphd\" (UID: \"d0fe4920-4f32-424e-a9bd-467aa3689506\") " pod="openshift-marketplace/redhat-operators-kxphd" Dec 03 10:24:11 crc kubenswrapper[4576]: I1203 10:24:11.698053 4576 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vwvbn\" (UniqueName: 
\"kubernetes.io/projected/d0fe4920-4f32-424e-a9bd-467aa3689506-kube-api-access-vwvbn\") pod \"redhat-operators-kxphd\" (UID: \"d0fe4920-4f32-424e-a9bd-467aa3689506\") " pod="openshift-marketplace/redhat-operators-kxphd" Dec 03 10:24:11 crc kubenswrapper[4576]: I1203 10:24:11.699092 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d0fe4920-4f32-424e-a9bd-467aa3689506-catalog-content\") pod \"redhat-operators-kxphd\" (UID: \"d0fe4920-4f32-424e-a9bd-467aa3689506\") " pod="openshift-marketplace/redhat-operators-kxphd" Dec 03 10:24:11 crc kubenswrapper[4576]: I1203 10:24:11.699212 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d0fe4920-4f32-424e-a9bd-467aa3689506-utilities\") pod \"redhat-operators-kxphd\" (UID: \"d0fe4920-4f32-424e-a9bd-467aa3689506\") " pod="openshift-marketplace/redhat-operators-kxphd" Dec 03 10:24:11 crc kubenswrapper[4576]: I1203 10:24:11.727145 4576 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vwvbn\" (UniqueName: \"kubernetes.io/projected/d0fe4920-4f32-424e-a9bd-467aa3689506-kube-api-access-vwvbn\") pod \"redhat-operators-kxphd\" (UID: \"d0fe4920-4f32-424e-a9bd-467aa3689506\") " pod="openshift-marketplace/redhat-operators-kxphd" Dec 03 10:24:11 crc kubenswrapper[4576]: I1203 10:24:11.860386 4576 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kxphd" Dec 03 10:24:12 crc kubenswrapper[4576]: I1203 10:24:12.354420 4576 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kxphd"] Dec 03 10:24:12 crc kubenswrapper[4576]: I1203 10:24:12.752651 4576 generic.go:334] "Generic (PLEG): container finished" podID="d0fe4920-4f32-424e-a9bd-467aa3689506" containerID="a021878fec8eed6f06daadd99b168de255b7fdfb829553947c5936026ef181b0" exitCode=0 Dec 03 10:24:12 crc kubenswrapper[4576]: I1203 10:24:12.752862 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kxphd" event={"ID":"d0fe4920-4f32-424e-a9bd-467aa3689506","Type":"ContainerDied","Data":"a021878fec8eed6f06daadd99b168de255b7fdfb829553947c5936026ef181b0"} Dec 03 10:24:12 crc kubenswrapper[4576]: I1203 10:24:12.752982 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kxphd" event={"ID":"d0fe4920-4f32-424e-a9bd-467aa3689506","Type":"ContainerStarted","Data":"1480de08bfc1b91803a57790119b772c83ba6df8d97b9a5eb8affe510782f36f"} Dec 03 10:24:12 crc kubenswrapper[4576]: I1203 10:24:12.756573 4576 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 10:24:13 crc kubenswrapper[4576]: I1203 10:24:13.764070 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kxphd" event={"ID":"d0fe4920-4f32-424e-a9bd-467aa3689506","Type":"ContainerStarted","Data":"ca900d10539ec38162828145aead8e4aa37ed1ac020a1e5928835a7df8da9cca"} Dec 03 10:24:17 crc kubenswrapper[4576]: I1203 10:24:17.808391 4576 generic.go:334] "Generic (PLEG): container finished" podID="d0fe4920-4f32-424e-a9bd-467aa3689506" containerID="ca900d10539ec38162828145aead8e4aa37ed1ac020a1e5928835a7df8da9cca" exitCode=0 Dec 03 10:24:17 crc kubenswrapper[4576]: I1203 10:24:17.808513 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kxphd" 
event={"ID":"d0fe4920-4f32-424e-a9bd-467aa3689506","Type":"ContainerDied","Data":"ca900d10539ec38162828145aead8e4aa37ed1ac020a1e5928835a7df8da9cca"} Dec 03 10:24:18 crc kubenswrapper[4576]: I1203 10:24:18.855221 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kxphd" event={"ID":"d0fe4920-4f32-424e-a9bd-467aa3689506","Type":"ContainerStarted","Data":"c5d9c932e94f2cab1ccd56e85a34e13c7cc172ff2ab8f96d57a910016824f8dd"} Dec 03 10:24:18 crc kubenswrapper[4576]: I1203 10:24:18.897915 4576 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-kxphd" podStartSLOduration=2.133169516 podStartE2EDuration="7.897896314s" podCreationTimestamp="2025-12-03 10:24:11 +0000 UTC" firstStartedPulling="2025-12-03 10:24:12.756318618 +0000 UTC m=+6260.142295602" lastFinishedPulling="2025-12-03 10:24:18.521045406 +0000 UTC m=+6265.907022400" observedRunningTime="2025-12-03 10:24:18.879095586 +0000 UTC m=+6266.265072580" watchObservedRunningTime="2025-12-03 10:24:18.897896314 +0000 UTC m=+6266.283873288" Dec 03 10:24:21 crc kubenswrapper[4576]: I1203 10:24:21.861489 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-kxphd" Dec 03 10:24:21 crc kubenswrapper[4576]: I1203 10:24:21.861856 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-kxphd" Dec 03 10:24:22 crc kubenswrapper[4576]: I1203 10:24:22.907033 4576 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-kxphd" podUID="d0fe4920-4f32-424e-a9bd-467aa3689506" containerName="registry-server" probeResult="failure" output=< Dec 03 10:24:22 crc kubenswrapper[4576]: timeout: failed to connect service ":50051" within 1s Dec 03 10:24:22 crc kubenswrapper[4576]: > Dec 03 10:24:31 crc kubenswrapper[4576]: I1203 10:24:31.932569 4576 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-kxphd" Dec 03 10:24:31 crc kubenswrapper[4576]: I1203 10:24:31.997575 4576 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-kxphd" Dec 03 10:24:32 crc kubenswrapper[4576]: I1203 10:24:32.171205 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-kxphd"] Dec 03 10:24:33 crc kubenswrapper[4576]: I1203 10:24:33.019161 4576 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-kxphd" podUID="d0fe4920-4f32-424e-a9bd-467aa3689506" containerName="registry-server" containerID="cri-o://c5d9c932e94f2cab1ccd56e85a34e13c7cc172ff2ab8f96d57a910016824f8dd" gracePeriod=2 Dec 03 10:24:33 crc kubenswrapper[4576]: I1203 10:24:33.538883 4576 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-kxphd" Dec 03 10:24:33 crc kubenswrapper[4576]: I1203 10:24:33.726053 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vwvbn\" (UniqueName: \"kubernetes.io/projected/d0fe4920-4f32-424e-a9bd-467aa3689506-kube-api-access-vwvbn\") pod \"d0fe4920-4f32-424e-a9bd-467aa3689506\" (UID: \"d0fe4920-4f32-424e-a9bd-467aa3689506\") " Dec 03 10:24:33 crc kubenswrapper[4576]: I1203 10:24:33.726815 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d0fe4920-4f32-424e-a9bd-467aa3689506-utilities\") pod \"d0fe4920-4f32-424e-a9bd-467aa3689506\" (UID: \"d0fe4920-4f32-424e-a9bd-467aa3689506\") " Dec 03 10:24:33 crc kubenswrapper[4576]: I1203 10:24:33.726951 4576 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d0fe4920-4f32-424e-a9bd-467aa3689506-catalog-content\") pod \"d0fe4920-4f32-424e-a9bd-467aa3689506\" (UID: \"d0fe4920-4f32-424e-a9bd-467aa3689506\") " Dec 03 10:24:33 crc kubenswrapper[4576]: I1203 10:24:33.728725 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d0fe4920-4f32-424e-a9bd-467aa3689506-utilities" (OuterVolumeSpecName: "utilities") pod "d0fe4920-4f32-424e-a9bd-467aa3689506" (UID: "d0fe4920-4f32-424e-a9bd-467aa3689506"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 10:24:33 crc kubenswrapper[4576]: I1203 10:24:33.740031 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0fe4920-4f32-424e-a9bd-467aa3689506-kube-api-access-vwvbn" (OuterVolumeSpecName: "kube-api-access-vwvbn") pod "d0fe4920-4f32-424e-a9bd-467aa3689506" (UID: "d0fe4920-4f32-424e-a9bd-467aa3689506"). InnerVolumeSpecName "kube-api-access-vwvbn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 10:24:33 crc kubenswrapper[4576]: I1203 10:24:33.833634 4576 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vwvbn\" (UniqueName: \"kubernetes.io/projected/d0fe4920-4f32-424e-a9bd-467aa3689506-kube-api-access-vwvbn\") on node \"crc\" DevicePath \"\"" Dec 03 10:24:33 crc kubenswrapper[4576]: I1203 10:24:33.833661 4576 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d0fe4920-4f32-424e-a9bd-467aa3689506-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 10:24:33 crc kubenswrapper[4576]: I1203 10:24:33.862828 4576 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d0fe4920-4f32-424e-a9bd-467aa3689506-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d0fe4920-4f32-424e-a9bd-467aa3689506" (UID: "d0fe4920-4f32-424e-a9bd-467aa3689506"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 10:24:33 crc kubenswrapper[4576]: I1203 10:24:33.939166 4576 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d0fe4920-4f32-424e-a9bd-467aa3689506-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 10:24:34 crc kubenswrapper[4576]: I1203 10:24:34.035343 4576 generic.go:334] "Generic (PLEG): container finished" podID="d0fe4920-4f32-424e-a9bd-467aa3689506" containerID="c5d9c932e94f2cab1ccd56e85a34e13c7cc172ff2ab8f96d57a910016824f8dd" exitCode=0 Dec 03 10:24:34 crc kubenswrapper[4576]: I1203 10:24:34.035437 4576 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kxphd" Dec 03 10:24:34 crc kubenswrapper[4576]: I1203 10:24:34.035434 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kxphd" event={"ID":"d0fe4920-4f32-424e-a9bd-467aa3689506","Type":"ContainerDied","Data":"c5d9c932e94f2cab1ccd56e85a34e13c7cc172ff2ab8f96d57a910016824f8dd"} Dec 03 10:24:34 crc kubenswrapper[4576]: I1203 10:24:34.035488 4576 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kxphd" event={"ID":"d0fe4920-4f32-424e-a9bd-467aa3689506","Type":"ContainerDied","Data":"1480de08bfc1b91803a57790119b772c83ba6df8d97b9a5eb8affe510782f36f"} Dec 03 10:24:34 crc kubenswrapper[4576]: I1203 10:24:34.035511 4576 scope.go:117] "RemoveContainer" containerID="c5d9c932e94f2cab1ccd56e85a34e13c7cc172ff2ab8f96d57a910016824f8dd" Dec 03 10:24:34 crc kubenswrapper[4576]: I1203 10:24:34.080284 4576 scope.go:117] "RemoveContainer" containerID="ca900d10539ec38162828145aead8e4aa37ed1ac020a1e5928835a7df8da9cca" Dec 03 10:24:34 crc kubenswrapper[4576]: I1203 10:24:34.099689 4576 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-kxphd"] Dec 03 10:24:34 crc kubenswrapper[4576]: I1203 10:24:34.108399 4576 scope.go:117] "RemoveContainer" containerID="a021878fec8eed6f06daadd99b168de255b7fdfb829553947c5936026ef181b0" Dec 03 10:24:34 crc kubenswrapper[4576]: I1203 10:24:34.109874 4576 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-kxphd"] Dec 03 10:24:34 crc kubenswrapper[4576]: I1203 10:24:34.161177 4576 scope.go:117] "RemoveContainer" containerID="c5d9c932e94f2cab1ccd56e85a34e13c7cc172ff2ab8f96d57a910016824f8dd" Dec 03 10:24:34 crc kubenswrapper[4576]: E1203 10:24:34.161706 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c5d9c932e94f2cab1ccd56e85a34e13c7cc172ff2ab8f96d57a910016824f8dd\": container with ID starting with c5d9c932e94f2cab1ccd56e85a34e13c7cc172ff2ab8f96d57a910016824f8dd not found: ID does not exist" containerID="c5d9c932e94f2cab1ccd56e85a34e13c7cc172ff2ab8f96d57a910016824f8dd" Dec 03 10:24:34 crc kubenswrapper[4576]: I1203 10:24:34.161737 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5d9c932e94f2cab1ccd56e85a34e13c7cc172ff2ab8f96d57a910016824f8dd"} err="failed to get container status \"c5d9c932e94f2cab1ccd56e85a34e13c7cc172ff2ab8f96d57a910016824f8dd\": rpc error: code = NotFound desc = could not find container \"c5d9c932e94f2cab1ccd56e85a34e13c7cc172ff2ab8f96d57a910016824f8dd\": container with ID starting with c5d9c932e94f2cab1ccd56e85a34e13c7cc172ff2ab8f96d57a910016824f8dd not found: ID does not exist" Dec 03 10:24:34 crc 
kubenswrapper[4576]: I1203 10:24:34.161758 4576 scope.go:117] "RemoveContainer" containerID="ca900d10539ec38162828145aead8e4aa37ed1ac020a1e5928835a7df8da9cca" Dec 03 10:24:34 crc kubenswrapper[4576]: E1203 10:24:34.162285 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca900d10539ec38162828145aead8e4aa37ed1ac020a1e5928835a7df8da9cca\": container with ID starting with ca900d10539ec38162828145aead8e4aa37ed1ac020a1e5928835a7df8da9cca not found: ID does not exist" containerID="ca900d10539ec38162828145aead8e4aa37ed1ac020a1e5928835a7df8da9cca" Dec 03 10:24:34 crc kubenswrapper[4576]: I1203 10:24:34.162394 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca900d10539ec38162828145aead8e4aa37ed1ac020a1e5928835a7df8da9cca"} err="failed to get container status \"ca900d10539ec38162828145aead8e4aa37ed1ac020a1e5928835a7df8da9cca\": rpc error: code = NotFound desc = could not find container \"ca900d10539ec38162828145aead8e4aa37ed1ac020a1e5928835a7df8da9cca\": container with ID starting with ca900d10539ec38162828145aead8e4aa37ed1ac020a1e5928835a7df8da9cca not found: ID does not exist" Dec 03 10:24:34 crc kubenswrapper[4576]: I1203 10:24:34.162475 4576 scope.go:117] "RemoveContainer" containerID="a021878fec8eed6f06daadd99b168de255b7fdfb829553947c5936026ef181b0" Dec 03 10:24:34 crc kubenswrapper[4576]: E1203 10:24:34.162883 4576 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a021878fec8eed6f06daadd99b168de255b7fdfb829553947c5936026ef181b0\": container with ID starting with a021878fec8eed6f06daadd99b168de255b7fdfb829553947c5936026ef181b0 not found: ID does not exist" containerID="a021878fec8eed6f06daadd99b168de255b7fdfb829553947c5936026ef181b0" Dec 03 10:24:34 crc kubenswrapper[4576]: I1203 10:24:34.162909 4576 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a021878fec8eed6f06daadd99b168de255b7fdfb829553947c5936026ef181b0"} err="failed to get container status \"a021878fec8eed6f06daadd99b168de255b7fdfb829553947c5936026ef181b0\": rpc error: code = NotFound desc = could not find container \"a021878fec8eed6f06daadd99b168de255b7fdfb829553947c5936026ef181b0\": container with ID starting with a021878fec8eed6f06daadd99b168de255b7fdfb829553947c5936026ef181b0 not found: ID does not exist" Dec 03 10:24:35 crc kubenswrapper[4576]: I1203 10:24:35.691989 4576 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d0fe4920-4f32-424e-a9bd-467aa3689506" path="/var/lib/kubelet/pods/d0fe4920-4f32-424e-a9bd-467aa3689506/volumes"